/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
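/* The encoding packs four primitive predicate bits -- LT = 1, EQ = 2,
   GT = 4, UNORD = 8 -- so each compound code above is the bitwise OR of
   the primitive outcomes it allows, e.g.

     COMPCODE_LE = COMPCODE_LT | COMPCODE_EQ                  =  3
     COMPCODE_NE = COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD = 13

   Combining two comparisons of the same operands with AND or OR thus
   reduces to ANDing or ORing their codes.  */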

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
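/* Worked example, using 8-bit values for brevity: 0x70 + 0x70 wraps to
   0xE0, a negative sum of two positive operands.  Here a ^ b has a clear
   sign bit (equal signs) while a ^ sum has it set (sign flipped), so
   ~(a ^ b) & (a ^ sum) is negative and the macro reports the overflow.
   Operands of opposite signs can never overflow, and then a ^ b has the
   sign bit set, forcing the macro's result to zero.  */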

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
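/* Concretely, on a host with a 64-bit HOST_WIDE_INT the half-words hold
   32 bits each and BASE is 2^32, so for x = 0x123456789ABCDEF0 we get
   LOWPART (x) = 0x9ABCDEF0 and HIGHPART (x) = 0x12345678, and indeed
   x == LOWPART (x) + HIGHPART (x) * BASE.  Keeping every digit positive
   and half-width guarantees that a digit product plus carries still fits
   in a single HOST_WIDE_INT, which the multiply and divide loops below
   rely on.  */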

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
     CONST_OVERFLOWED is nonzero,
     or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
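/* A worked example of the truncation above: forcing the value 0x1F0 into
   an 8-bit unsigned type clears everything above bit 7, leaving 0xF0;
   forcing it into an 8-bit signed type additionally sign extends, since
   bit 7 is now the sign bit, yielding -16.  Because the value changed and
   the type is signed, a caller passing OVERFLOWABLE > 0 gets the result
   marked with TREE_OVERFLOW.  */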

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
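/* The carry out of the low word is recovered without needing a wider
   type: the unsigned sum l = l1 + l2 wraps exactly when the result is
   smaller than an operand, so (l < l1) is the carry bit folded into the
   high word.  E.g. with 4-bit words, 0xE + 0x5 = 0x3 and 0x3 < 0xE,
   giving carry 1.  */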

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
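/* The loops above perform grade-school long multiplication in base
   2^(HOST_BITS_PER_WIDE_INT / 2): digit I of one operand times digit J of
   the other contributes to column I + J, with carries moving one column
   left.  For the signed case, the unsigned upper product is first
   corrected by subtracting the other operand wherever an input was
   negative; after that, the top half must be all zero bits (non-negative
   result) or all one bits (negative result), which is what the final
   return expression tests.  */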

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
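/* Note the split shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   when filling the high word: for count == 0 a single shift by the full
   word width would be undefined behavior in C, so it is done in two steps
   that are each strictly smaller than the word width.  The closing
   signmask step then replicates the value's sign bit (or zero) into every
   bit above PREC, keeping bits outside the precision canonical.  */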

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden))  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
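/* A small worked example of the rounding fix-ups: dividing -7 by 2 gives
   a truncated quotient of -3 with remainder -1.  FLOOR_DIV_EXPR adjusts
   the inexact negative quotient down to -4; CEIL_DIV_EXPR leaves it at -3
   and instead adjusts positive inexact quotients (7 / 2 goes from 3 up to
   4); ROUND_DIV_EXPR adjusts away from zero only when 2 * |rem| > |den|,
   so -8 / 3 moves from -2 to -3 while the -7 / 2 tie stays at -3.  The
   remainder is recomputed from the adjusted quotient at the end so that
   num == quo * den + rem always holds.  */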

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
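/* The only signed constant the test above rejects is the most negative
   one: in an N-bit two's complement type, -(-2^(N-1)) = 2^(N-1) is not
   representable, so negating e.g. a 32-bit INT_MIN of -2147483648 would
   wrap back to itself.  Every other signed constant negates cleanly.  */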

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
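/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR yields the
   variable part x and the literal 5; because the literal was subtracted
   it comes back in *MINUS_LITP rather than *LITP.  Splitting IN = x + c,
   where c is TREE_CONSTANT but not an actual literal (say an expression
   built from sizeof), fills *CONP with c instead.  */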

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
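/* For instance, folding the constant expression 6 * 7 reaches the
   MULT_EXPR arm, mul_double produces low == 42 with no overflow, and the
   result is forced back into the operands' type.  A right shift or right
   rotate is simply rewritten as the corresponding left operation with a
   negated count before the shared lshift_double / lrotate_double call.  */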

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
1863
1864/* Create a size type INT_CST node with NUMBER sign extended. KIND
1865 indicates which particular sizetype to create. */
1866
1867tree
1868size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869{
1870 return build_int_cst (sizetype_tab[(int) kind], number);
1871}
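
/* For example, size_int_kind (8, SIZETYPE) yields the sizetype constant 8;
   callers normally reach this through wrapper macros such as size_int and
   bitsize_int, which supply the size_type_kind argument.  */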
1872
1873/* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1874 is a tree code. The type of the result is taken from the operands.
1875 Both must be of the same integer type, and it must be a size type.
1876 If the operands are constant, so is the result. */
1877
1878tree
1879size_binop (enum tree_code code, tree arg0, tree arg1)
1880{
1881 tree type = TREE_TYPE (arg0);
1882
1883 if (arg0 == error_mark_node || arg1 == error_mark_node)
1884 return error_mark_node;
1885
1886 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887 && type == TREE_TYPE (arg1));
1888
1889 /* Handle the special case of two integer constants faster. */
1890 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891 {
1892 /* And some specific cases even faster than that. */
1893 if (code == PLUS_EXPR && integer_zerop (arg0))
1894 return arg1;
1895 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896 && integer_zerop (arg1))
1897 return arg0;
1898 else if (code == MULT_EXPR && integer_onep (arg0))
1899 return arg1;
1900
1901 /* Handle general case of two integer constants. */
1902 return int_const_binop (code, arg0, arg1, 0);
1903 }
1904
1905 return fold_build2 (code, type, arg0, arg1);
1906}
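
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   straight to the sizetype constant 12 via int_const_binop; anything
   involving a non-constant operand is handed to fold_build2 instead.  */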
1907
1908/* Given two values, either both of sizetype or both of bitsizetype,
1909 compute the difference between the two values. Return the value
1910 in the signed type corresponding to the type of the operands. */
1911
1912tree
1913size_diffop (tree arg0, tree arg1)
1914{
1915 tree type = TREE_TYPE (arg0);
1916 tree ctype;
1917
1918 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919 && type == TREE_TYPE (arg1));
1920
1921 /* If the type is already signed, just do the simple thing. */
1922 if (!TYPE_UNSIGNED (type))
1923 return size_binop (MINUS_EXPR, arg0, arg1);
1924
1925 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926
1927 /* If either operand is not a constant, do the conversions to the signed
1928 type and subtract. The hardware will do the right thing with any
1929 overflow in the subtraction. */
1930 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932 fold_convert (ctype, arg1));
1933
1934 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936 overflow) and negate (which can't either). Special-case a result
1937 of zero while we're here. */
1938 if (tree_int_cst_equal (arg0, arg1))
1939 return build_int_cst (ctype, 0);
1940 else if (tree_int_cst_lt (arg1, arg0))
1941 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942 else
1943 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944 fold_convert (ctype, size_binop (MINUS_EXPR,
1945 arg1, arg0)));
1946}
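
/* For example, with ARG0 == (sizetype) 2 and ARG1 == (sizetype) 5, the
   result is the ssizetype constant -3: since 5 > 2 the code computes
   5 - 2 in the unsigned type (which cannot overflow), converts the 3 to
   ssizetype and negates it, rather than relying on the wraparound of
   2 - 5.  */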
1947
1948/* A subroutine of fold_convert_const handling conversions of an
1949 INTEGER_CST to another integer type. */
1950
1951static tree
1952fold_convert_const_int_from_int (tree type, tree arg1)
1953{
1954 tree t;
1955
1956 /* Given an integer constant, make new constant with new type,
1957 appropriately sign-extended or truncated. */
1958 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959 TREE_INT_CST_HIGH (arg1));
1960
1961 t = force_fit_type (t,
1962 /* Don't set the overflow when
1963 converting a pointer */
1964 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965 (TREE_INT_CST_HIGH (arg1) < 0
1966 && (TYPE_UNSIGNED (type)
1967 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968 | TREE_OVERFLOW (arg1),
1969 TREE_CONSTANT_OVERFLOW (arg1));
1970
1971 return t;
1972}
1973
1974/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975 to an integer type. */
1976
1977static tree
1978fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979{
1980 int overflow = 0;
1981 tree t;
1982
1983 /* The following code implements the floating point to integer
1984 conversion rules required by the Java Language Specification,
1985 that IEEE NaNs are mapped to zero and values that overflow
1986 the target precision saturate, i.e. values greater than
1987 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988 are mapped to INT_MIN. These semantics are allowed by the
1989 C and C++ standards that simply state that the behavior of
1990 FP-to-integer conversion is unspecified upon overflow. */
1991
1992 HOST_WIDE_INT high, low;
1993 REAL_VALUE_TYPE r;
1994 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995
1996 switch (code)
1997 {
1998 case FIX_TRUNC_EXPR:
1999 real_trunc (&r, VOIDmode, &x);
2000 break;
2001
2002 case FIX_CEIL_EXPR:
2003 real_ceil (&r, VOIDmode, &x);
2004 break;
2005
2006 case FIX_FLOOR_EXPR:
2007 real_floor (&r, VOIDmode, &x);
2008 break;
2009
2010 case FIX_ROUND_EXPR:
2011 real_round (&r, VOIDmode, &x);
2012 break;
2013
2014 default:
2015 gcc_unreachable ();
2016 }
2017
2018 /* If R is NaN, return zero and show we have an overflow. */
2019 if (REAL_VALUE_ISNAN (r))
2020 {
2021 overflow = 1;
2022 high = 0;
2023 low = 0;
2024 }
2025
2026 /* See if R is less than the lower bound or greater than the
2027 upper bound. */
2028
2029 if (! overflow)
2030 {
2031 tree lt = TYPE_MIN_VALUE (type);
2032 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033 if (REAL_VALUES_LESS (r, l))
2034 {
2035 overflow = 1;
2036 high = TREE_INT_CST_HIGH (lt);
2037 low = TREE_INT_CST_LOW (lt);
2038 }
2039 }
2040
2041 if (! overflow)
2042 {
2043 tree ut = TYPE_MAX_VALUE (type);
2044 if (ut)
2045 {
2046 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 if (REAL_VALUES_LESS (u, r))
2048 {
2049 overflow = 1;
2050 high = TREE_INT_CST_HIGH (ut);
2051 low = TREE_INT_CST_LOW (ut);
2052 }
2053 }
2054 }
2055
2056 if (! overflow)
2057 REAL_VALUE_TO_INT (&low, &high, r);
2058
2059 t = build_int_cst_wide (type, low, high);
2060
2061 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062 TREE_CONSTANT_OVERFLOW (arg1));
2063 return t;
2064}
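
/* For example, folding (int) 1.0e30 with FIX_TRUNC_EXPR saturates to
   INT_MAX and marks the result with TREE_OVERFLOW, and folding
   (int) __builtin_nan ("") yields 0, matching the Java-style semantics
   described above.  */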
2065
2066/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to another floating point type. */
2068
2069static tree
2070fold_convert_const_real_from_real (tree type, tree arg1)
2071{
2072 REAL_VALUE_TYPE value;
2073 tree t;
2074
2075 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076 t = build_real (type, value);
2077
2078 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079 TREE_CONSTANT_OVERFLOW (t)
2080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081 return t;
2082}
2083
2084/* Attempt to fold type conversion operation CODE of expression ARG1 to
2085 type TYPE. If no simplification can be done return NULL_TREE. */
2086
2087static tree
2088fold_convert_const (enum tree_code code, tree type, tree arg1)
2089{
2090 if (TREE_TYPE (arg1) == type)
2091 return arg1;
2092
2093 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094 {
2095 if (TREE_CODE (arg1) == INTEGER_CST)
2096 return fold_convert_const_int_from_int (type, arg1);
2097 else if (TREE_CODE (arg1) == REAL_CST)
2098 return fold_convert_const_int_from_real (code, type, arg1);
2099 }
2100 else if (TREE_CODE (type) == REAL_TYPE)
2101 {
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return build_real_from_int_cst (type, arg1);
2104 if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_real_from_real (type, arg1);
2106 }
2107 return NULL_TREE;
2108}
2109
2110/* Construct a vector of zero elements of vector type TYPE. */
2111
2112static tree
2113build_zero_vector (tree type)
2114{
2115 tree elem, list;
2116 int i, units;
2117
2118 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119 units = TYPE_VECTOR_SUBPARTS (type);
2120
2121 list = NULL_TREE;
2122 for (i = 0; i < units; i++)
2123 list = tree_cons (NULL_TREE, elem, list);
2124 return build_vector (type, list);
2125}
2126
2127/* Convert expression ARG to type TYPE. Used by the middle-end for
2128 simple conversions in preference to calling the front-end's convert. */
2129
2130tree
2131fold_convert (tree type, tree arg)
2132{
2133 tree orig = TREE_TYPE (arg);
2134 tree tem;
2135
2136 if (type == orig)
2137 return arg;
2138
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return error_mark_node;
2143
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146 TYPE_MAIN_VARIANT (orig)))
2147 return fold_build1 (NOP_EXPR, type, arg);
2148
2149 switch (TREE_CODE (type))
2150 {
2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152 case POINTER_TYPE: case REFERENCE_TYPE:
2153 case OFFSET_TYPE:
2154 if (TREE_CODE (arg) == INTEGER_CST)
2155 {
2156 tem = fold_convert_const (NOP_EXPR, type, arg);
2157 if (tem != NULL_TREE)
2158 return tem;
2159 }
2160 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161 || TREE_CODE (orig) == OFFSET_TYPE)
2162 return fold_build1 (NOP_EXPR, type, arg);
2163 if (TREE_CODE (orig) == COMPLEX_TYPE)
2164 {
2165 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166 return fold_convert (type, tem);
2167 }
2168 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2169 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170 return fold_build1 (NOP_EXPR, type, arg);
2171
2172 case REAL_TYPE:
2173 if (TREE_CODE (arg) == INTEGER_CST)
2174 {
2175 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176 if (tem != NULL_TREE)
2177 return tem;
2178 }
2179 else if (TREE_CODE (arg) == REAL_CST)
2180 {
2181 tem = fold_convert_const (NOP_EXPR, type, arg);
2182 if (tem != NULL_TREE)
2183 return tem;
2184 }
2185
2186 switch (TREE_CODE (orig))
2187 {
2188 case INTEGER_TYPE:
2189 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190 case POINTER_TYPE: case REFERENCE_TYPE:
2191 return fold_build1 (FLOAT_EXPR, type, arg);
2192
2193 case REAL_TYPE:
2194 return fold_build1 (NOP_EXPR, type, arg);
2195
2196 case COMPLEX_TYPE:
2197 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198 return fold_convert (type, tem);
2199
2200 default:
2201 gcc_unreachable ();
2202 }
2203
2204 case COMPLEX_TYPE:
2205 switch (TREE_CODE (orig))
2206 {
2207 case INTEGER_TYPE:
2208 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209 case POINTER_TYPE: case REFERENCE_TYPE:
2210 case REAL_TYPE:
2211 return build2 (COMPLEX_EXPR, type,
2212 fold_convert (TREE_TYPE (type), arg),
2213 fold_convert (TREE_TYPE (type), integer_zero_node));
2214 case COMPLEX_TYPE:
2215 {
2216 tree rpart, ipart;
2217
2218 if (TREE_CODE (arg) == COMPLEX_EXPR)
2219 {
2220 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2223 }
2224
2225 arg = save_expr (arg);
2226 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228 rpart = fold_convert (TREE_TYPE (type), rpart);
2229 ipart = fold_convert (TREE_TYPE (type), ipart);
2230 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2231 }
2232
2233 default:
2234 gcc_unreachable ();
2235 }
2236
2237 case VECTOR_TYPE:
2238 if (integer_zerop (arg))
2239 return build_zero_vector (type);
2240 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242 || TREE_CODE (orig) == VECTOR_TYPE);
2243 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244
2245 case VOID_TYPE:
2246 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247
2248 default:
2249 gcc_unreachable ();
2250 }
2251}
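
/* For example, fold_convert (double_type_node,
   build_int_cst (integer_type_node, 5)) produces the REAL_CST 5.0 via
   fold_convert_const, and converting a COMPLEX_TYPE value to a scalar
   type keeps only its REALPART_EXPR, as in the cases above.  */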
2252
2253/* Return false if expr can be assumed not to be an lvalue, true
2254 otherwise. */
2255
2256static bool
2257maybe_lvalue_p (tree x)
2258{
2259 /* We only need to wrap lvalue tree codes. */
2260 switch (TREE_CODE (x))
2261 {
2262 case VAR_DECL:
2263 case PARM_DECL:
2264 case RESULT_DECL:
2265 case LABEL_DECL:
2266 case FUNCTION_DECL:
2267 case SSA_NAME:
2268
2269 case COMPONENT_REF:
2270 case INDIRECT_REF:
2271 case ALIGN_INDIRECT_REF:
2272 case MISALIGNED_INDIRECT_REF:
2273 case ARRAY_REF:
2274 case ARRAY_RANGE_REF:
2275 case BIT_FIELD_REF:
2276 case OBJ_TYPE_REF:
2277
2278 case REALPART_EXPR:
2279 case IMAGPART_EXPR:
2280 case PREINCREMENT_EXPR:
2281 case PREDECREMENT_EXPR:
2282 case SAVE_EXPR:
2283 case TRY_CATCH_EXPR:
2284 case WITH_CLEANUP_EXPR:
2285 case COMPOUND_EXPR:
2286 case MODIFY_EXPR:
2287 case TARGET_EXPR:
2288 case COND_EXPR:
2289 case BIND_EXPR:
2290 case MIN_EXPR:
2291 case MAX_EXPR:
2292 break;
2293
2294 default:
2295 /* Assume the worst for front-end tree codes. */
2296 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2297 break;
2298 return false;
2299 }
2300
2301 return true;
2302}
2303
2304/* Return an expr equal to X but certainly not valid as an lvalue. */
2305
2306tree
2307non_lvalue (tree x)
2308{
2309 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310 us. */
2311 if (in_gimple_form)
2312 return x;
2313
2314 if (! maybe_lvalue_p (x))
2315 return x;
2316 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2317}
2318
2319/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320 Zero means allow extended lvalues. */
2321
2322int pedantic_lvalues;
2323
2324/* When pedantic, return an expr equal to X but certainly not valid as a
2325 pedantic lvalue. Otherwise, return X. */
2326
2327static tree
2328pedantic_non_lvalue (tree x)
2329{
2330 if (pedantic_lvalues)
2331 return non_lvalue (x);
2332 else
2333 return x;
2334}
2335
2336/* Given a tree comparison code, return the code that is the logical inverse
2337 of the given code. It is not safe to do this for floating-point
2338 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2339 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2340
2341enum tree_code
2342invert_tree_comparison (enum tree_code code, bool honor_nans)
2343{
2344 if (honor_nans && flag_trapping_math)
2345 return ERROR_MARK;
2346
2347 switch (code)
2348 {
2349 case EQ_EXPR:
2350 return NE_EXPR;
2351 case NE_EXPR:
2352 return EQ_EXPR;
2353 case GT_EXPR:
2354 return honor_nans ? UNLE_EXPR : LE_EXPR;
2355 case GE_EXPR:
2356 return honor_nans ? UNLT_EXPR : LT_EXPR;
2357 case LT_EXPR:
2358 return honor_nans ? UNGE_EXPR : GE_EXPR;
2359 case LE_EXPR:
2360 return honor_nans ? UNGT_EXPR : GT_EXPR;
2361 case LTGT_EXPR:
2362 return UNEQ_EXPR;
2363 case UNEQ_EXPR:
2364 return LTGT_EXPR;
2365 case UNGT_EXPR:
2366 return LE_EXPR;
2367 case UNGE_EXPR:
2368 return LT_EXPR;
2369 case UNLT_EXPR:
2370 return GE_EXPR;
2371 case UNLE_EXPR:
2372 return GT_EXPR;
2373 case ORDERED_EXPR:
2374 return UNORDERED_EXPR;
2375 case UNORDERED_EXPR:
2376 return ORDERED_EXPR;
2377 default:
2378 gcc_unreachable ();
2379 }
2380}
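
/* For example, when NaNs must be honored the inverse of x < y is
   x UNGE y, which is also true when the operands are unordered; if in
   addition flag_trapping_math is set we return ERROR_MARK instead,
   because the unordered form would silently drop the invalid-operand
   trap that the ordered comparison can raise.  */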
2381
2382/* Similar, but return the comparison that results if the operands are
2383 swapped. This is safe for floating-point. */
2384
2385enum tree_code
2386swap_tree_comparison (enum tree_code code)
2387{
2388 switch (code)
2389 {
2390 case EQ_EXPR:
2391 case NE_EXPR:
2392 case ORDERED_EXPR:
2393 case UNORDERED_EXPR:
2394 case LTGT_EXPR:
2395 case UNEQ_EXPR:
2396 return code;
2397 case GT_EXPR:
2398 return LT_EXPR;
2399 case GE_EXPR:
2400 return LE_EXPR;
2401 case LT_EXPR:
2402 return GT_EXPR;
2403 case LE_EXPR:
2404 return GE_EXPR;
2405 case UNGT_EXPR:
2406 return UNLT_EXPR;
2407 case UNGE_EXPR:
2408 return UNLE_EXPR;
2409 case UNLT_EXPR:
2410 return UNGT_EXPR;
2411 case UNLE_EXPR:
2412 return UNGE_EXPR;
2413 default:
2414 gcc_unreachable ();
2415 }
2416}
2417
2418
2419/* Convert a comparison tree code from an enum tree_code representation
2420 into a compcode bit-based encoding. This function is the inverse of
2421 compcode_to_comparison. */
2422
2423static enum comparison_code
2424comparison_to_compcode (enum tree_code code)
2425{
2426 switch (code)
2427 {
2428 case LT_EXPR:
2429 return COMPCODE_LT;
2430 case EQ_EXPR:
2431 return COMPCODE_EQ;
2432 case LE_EXPR:
2433 return COMPCODE_LE;
2434 case GT_EXPR:
2435 return COMPCODE_GT;
2436 case NE_EXPR:
2437 return COMPCODE_NE;
2438 case GE_EXPR:
2439 return COMPCODE_GE;
2440 case ORDERED_EXPR:
2441 return COMPCODE_ORD;
2442 case UNORDERED_EXPR:
2443 return COMPCODE_UNORD;
2444 case UNLT_EXPR:
2445 return COMPCODE_UNLT;
2446 case UNEQ_EXPR:
2447 return COMPCODE_UNEQ;
2448 case UNLE_EXPR:
2449 return COMPCODE_UNLE;
2450 case UNGT_EXPR:
2451 return COMPCODE_UNGT;
2452 case LTGT_EXPR:
2453 return COMPCODE_LTGT;
2454 case UNGE_EXPR:
2455 return COMPCODE_UNGE;
2456 default:
2457 gcc_unreachable ();
2458 }
2459}
2460
2461/* Convert a compcode bit-based encoding of a comparison operator back
2462 to GCC's enum tree_code representation. This function is the
2463 inverse of comparison_to_compcode. */
2464
2465static enum tree_code
2466compcode_to_comparison (enum comparison_code code)
2467{
2468 switch (code)
2469 {
2470 case COMPCODE_LT:
2471 return LT_EXPR;
2472 case COMPCODE_EQ:
2473 return EQ_EXPR;
2474 case COMPCODE_LE:
2475 return LE_EXPR;
2476 case COMPCODE_GT:
2477 return GT_EXPR;
2478 case COMPCODE_NE:
2479 return NE_EXPR;
2480 case COMPCODE_GE:
2481 return GE_EXPR;
2482 case COMPCODE_ORD:
2483 return ORDERED_EXPR;
2484 case COMPCODE_UNORD:
2485 return UNORDERED_EXPR;
2486 case COMPCODE_UNLT:
2487 return UNLT_EXPR;
2488 case COMPCODE_UNEQ:
2489 return UNEQ_EXPR;
2490 case COMPCODE_UNLE:
2491 return UNLE_EXPR;
2492 case COMPCODE_UNGT:
2493 return UNGT_EXPR;
2494 case COMPCODE_LTGT:
2495 return LTGT_EXPR;
2496 case COMPCODE_UNGE:
2497 return UNGE_EXPR;
2498 default:
2499 gcc_unreachable ();
2500 }
2501}
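
/* The encoding makes the four primitive outcomes individual bits:
   COMPCODE_LT == 1, COMPCODE_EQ == 2, COMPCODE_GT == 4 and
   COMPCODE_UNORD == 8, so for example COMPCODE_LE == (LT | EQ) == 3 and
   COMPCODE_NE == (UNORD | LT | GT) == 13.  Logical AND and OR of two
   comparisons therefore become bitwise AND and OR of their compcodes,
   which is what combine_comparisons below relies on.  */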
2502
2503/* Return a tree for the comparison which is the combination of
2504 doing the AND or OR (depending on CODE) of the two operations LCODE
2505 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2506 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507 if this makes the transformation invalid. */
2508
2509tree
2510combine_comparisons (enum tree_code code, enum tree_code lcode,
2511 enum tree_code rcode, tree truth_type,
2512 tree ll_arg, tree lr_arg)
2513{
2514 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517 enum comparison_code compcode;
2518
2519 switch (code)
2520 {
2521 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522 compcode = lcompcode & rcompcode;
2523 break;
2524
2525 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526 compcode = lcompcode | rcompcode;
2527 break;
2528
2529 default:
2530 return NULL_TREE;
2531 }
2532
2533 if (!honor_nans)
2534 {
2535 /* Eliminate unordered comparisons, as well as LTGT and ORD
2536 which are not used unless the mode has NaNs. */
2537 compcode &= ~COMPCODE_UNORD;
2538 if (compcode == COMPCODE_LTGT)
2539 compcode = COMPCODE_NE;
2540 else if (compcode == COMPCODE_ORD)
2541 compcode = COMPCODE_TRUE;
2542 }
2543 else if (flag_trapping_math)
2544 {
2545 /* Check that the original operation and the optimized ones will trap
2546 under the same condition. */
2547 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548 && (lcompcode != COMPCODE_EQ)
2549 && (lcompcode != COMPCODE_ORD);
2550 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551 && (rcompcode != COMPCODE_EQ)
2552 && (rcompcode != COMPCODE_ORD);
2553 bool trap = (compcode & COMPCODE_UNORD) == 0
2554 && (compcode != COMPCODE_EQ)
2555 && (compcode != COMPCODE_ORD);
2556
2557 /* In a short-circuited boolean expression the LHS might be
2558 such that the RHS, if evaluated, will never trap. For
2559 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560 if neither x nor y is NaN. (This is a mixed blessing: for
2561 example, the expression above will never trap, hence
2562 optimizing it to x < y would be invalid). */
2563 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2565 rtrap = false;
2566
2567 /* If the comparison was short-circuited, and only the RHS
2568 trapped, we may now generate a spurious trap. */
2569 if (rtrap && !ltrap
2570 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2571 return NULL_TREE;
2572
2573 /* If we changed the conditions that cause a trap, we lose. */
2574 if ((ltrap || rtrap) != trap)
2575 return NULL_TREE;
2576 }
2577
2578 if (compcode == COMPCODE_TRUE)
2579 return constant_boolean_node (true, truth_type);
2580 else if (compcode == COMPCODE_FALSE)
2581 return constant_boolean_node (false, truth_type);
2582 else
2583 return fold_build2 (compcode_to_comparison (compcode),
2584 truth_type, ll_arg, lr_arg);
2585}
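
/* For example, combining (x < y) || (x == y) gives compcode
   (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE and folds to x <= y, while
   (x < y) && (x > y) gives (COMPCODE_LT & COMPCODE_GT) == COMPCODE_FALSE
   and folds to constant false, assuming the NaN and trapping checks
   above do not reject the transformation.  */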
2586
2587/* Return nonzero if CODE is a tree code that represents a truth value. */
2588
2589static int
2590truth_value_p (enum tree_code code)
2591{
2592 return (TREE_CODE_CLASS (code) == tcc_comparison
2593 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2596}
2597
2598/* Return nonzero if two operands (typically of the same tree node)
2599 are necessarily equal. If either argument has side-effects this
2600 function returns zero. FLAGS modifies behavior as follows:
2601
2602 If OEP_ONLY_CONST is set, only return nonzero for constants.
2603 This function tests whether the operands are indistinguishable;
2604 it does not test whether they are equal using C's == operation.
2605 The distinction is important for IEEE floating point, because
2606 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608
2609 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610 even though it may hold multiple values during a function.
2611 This is because a GCC tree node guarantees that nothing else is
2612 executed between the evaluation of its "operands" (which may often
2613 be evaluated in arbitrary order). Hence if the operands themselves
2614 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2616 unset means assuming isochronic (or instantaneous) tree equivalence.
2617 Unless comparing arbitrary expression trees, such as from different
2618 statements, this flag can usually be left unset.
2619
2620 If OEP_PURE_SAME is set, then pure functions with identical arguments
2621 are considered the same. It is used when the caller has other ways
2622 to ensure that global memory is unchanged in between. */
2623
2624int
2625operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626{
2627 /* If either is ERROR_MARK, they aren't equal. */
2628 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2629 return 0;
2630
2631 /* If both types don't have the same signedness, then we can't consider
2632 them equal. We must check this before the STRIP_NOPS calls
2633 because they may change the signedness of the arguments. */
2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636
2637 /* If both types don't have the same precision, then it is not safe
2638 to strip NOPs. */
2639 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2640 return 0;
2641
2642 STRIP_NOPS (arg0);
2643 STRIP_NOPS (arg1);
2644
2645 /* In case both args are comparisons but with different comparison
2646 code, try to swap the comparison operands of one arg to produce
2647 a match and compare that variant. */
2648 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649 && COMPARISON_CLASS_P (arg0)
2650 && COMPARISON_CLASS_P (arg1))
2651 {
2652 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653
2654 if (TREE_CODE (arg0) == swap_code)
2655 return operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags);
2659 }
2660
2661 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662 /* This is needed for conversions and for COMPONENT_REF.
2663 Might as well play it safe and always test this. */
2664 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2667 return 0;
2668
2669 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670 We don't care about side effects in that case because the SAVE_EXPR
2671 takes care of that for us. In all other cases, two expressions are
2672 equal if they have no side effects. If we have two identical
2673 expressions with side effects that should be treated the same due
2674 to the only side effects being identical SAVE_EXPR's, that will
2675 be detected in the recursive calls below. */
2676 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677 && (TREE_CODE (arg0) == SAVE_EXPR
2678 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2679 return 1;
2680
2681 /* Next handle constant cases, those for which we can return 1 even
2682 if ONLY_CONST is set. */
2683 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684 switch (TREE_CODE (arg0))
2685 {
2686 case INTEGER_CST:
2687 return (! TREE_CONSTANT_OVERFLOW (arg0)
2688 && ! TREE_CONSTANT_OVERFLOW (arg1)
2689 && tree_int_cst_equal (arg0, arg1));
2690
2691 case REAL_CST:
2692 return (! TREE_CONSTANT_OVERFLOW (arg0)
2693 && ! TREE_CONSTANT_OVERFLOW (arg1)
2694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695 TREE_REAL_CST (arg1)));
2696
2697 case VECTOR_CST:
2698 {
2699 tree v1, v2;
2700
2701 if (TREE_CONSTANT_OVERFLOW (arg0)
2702 || TREE_CONSTANT_OVERFLOW (arg1))
2703 return 0;
2704
2705 v1 = TREE_VECTOR_CST_ELTS (arg0);
2706 v2 = TREE_VECTOR_CST_ELTS (arg1);
2707 while (v1 && v2)
2708 {
2709 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2710 flags))
2711 return 0;
2712 v1 = TREE_CHAIN (v1);
2713 v2 = TREE_CHAIN (v2);
2714 }
2715
2716 return v1 == v2;
2717 }
2718
2719 case COMPLEX_CST:
2720 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2721 flags)
2722 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2723 flags));
2724
2725 case STRING_CST:
2726 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727 && ! memcmp (TREE_STRING_POINTER (arg0),
2728 TREE_STRING_POINTER (arg1),
2729 TREE_STRING_LENGTH (arg0)));
2730
2731 case ADDR_EXPR:
2732 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2733 0);
2734 default:
2735 break;
2736 }
2737
2738 if (flags & OEP_ONLY_CONST)
2739 return 0;
2740
2741/* Define macros to test an operand from arg0 and arg1 for equality and a
2742 variant that allows null and views null as being different from any
2743 variant that allows null and views null as being different from any
2744 non-null value. In the latter case, if either is null, then both
2744 must be; otherwise, do the normal comparison. */
2745#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2746 TREE_OPERAND (arg1, N), flags)
2747
2748#define OP_SAME_WITH_NULL(N) \
2749 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2750 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751
2752 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753 {
2754 case tcc_unary:
2755 /* Two conversions are equal only if signedness and modes match. */
2756 switch (TREE_CODE (arg0))
2757 {
2758 case NOP_EXPR:
2759 case CONVERT_EXPR:
2760 case FIX_CEIL_EXPR:
2761 case FIX_TRUNC_EXPR:
2762 case FIX_FLOOR_EXPR:
2763 case FIX_ROUND_EXPR:
2764 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2766 return 0;
2767 break;
2768 default:
2769 break;
2770 }
2771
2772 return OP_SAME (0);
2773
2774
2775 case tcc_comparison:
2776 case tcc_binary:
2777 if (OP_SAME (0) && OP_SAME (1))
2778 return 1;
2779
2780 /* For commutative ops, allow the other order. */
2781 return (commutative_tree_code (TREE_CODE (arg0))
2782 && operand_equal_p (TREE_OPERAND (arg0, 0),
2783 TREE_OPERAND (arg1, 1), flags)
2784 && operand_equal_p (TREE_OPERAND (arg0, 1),
2785 TREE_OPERAND (arg1, 0), flags));
2786
2787 case tcc_reference:
2788 /* If either of the pointer (or reference) expressions we are
2789 dereferencing contain a side effect, these cannot be equal. */
2790 if (TREE_SIDE_EFFECTS (arg0)
2791 || TREE_SIDE_EFFECTS (arg1))
2792 return 0;
2793
2794 switch (TREE_CODE (arg0))
2795 {
2796 case INDIRECT_REF:
2797 case ALIGN_INDIRECT_REF:
2798 case MISALIGNED_INDIRECT_REF:
2799 case REALPART_EXPR:
2800 case IMAGPART_EXPR:
2801 return OP_SAME (0);
2802
2803 case ARRAY_REF:
2804 case ARRAY_RANGE_REF:
2805 /* Operands 2 and 3 may be null. */
2806 return (OP_SAME (0)
2807 && OP_SAME (1)
2808 && OP_SAME_WITH_NULL (2)
2809 && OP_SAME_WITH_NULL (3));
2810
2811 case COMPONENT_REF:
2812 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2813 may be NULL when we're called to compare MEM_EXPRs. */
2814 return OP_SAME_WITH_NULL (0)
2815 && OP_SAME (1)
2816 && OP_SAME_WITH_NULL (2);
2817
2818 case BIT_FIELD_REF:
2819 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820
2821 default:
2822 return 0;
2823 }
2824
2825 case tcc_expression:
2826 switch (TREE_CODE (arg0))
2827 {
2828 case ADDR_EXPR:
2829 case TRUTH_NOT_EXPR:
2830 return OP_SAME (0);
2831
2832 case TRUTH_ANDIF_EXPR:
2833 case TRUTH_ORIF_EXPR:
2834 return OP_SAME (0) && OP_SAME (1);
2835
2836 case TRUTH_AND_EXPR:
2837 case TRUTH_OR_EXPR:
2838 case TRUTH_XOR_EXPR:
2839 if (OP_SAME (0) && OP_SAME (1))
2840 return 1;
2841
2842 /* Otherwise take into account this is a commutative operation. */
2843 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2844 TREE_OPERAND (arg1, 1), flags)
2845 && operand_equal_p (TREE_OPERAND (arg0, 1),
2846 TREE_OPERAND (arg1, 0), flags));
2847
2848 case CALL_EXPR:
2849 /* If the CALL_EXPRs call different functions, then they
2850 clearly can not be equal. */
2851 if (!OP_SAME (0))
2852 return 0;
2853
2854 {
2855 unsigned int cef = call_expr_flags (arg0);
2856 if (flags & OEP_PURE_SAME)
2857 cef &= ECF_CONST | ECF_PURE;
2858 else
2859 cef &= ECF_CONST;
2860 if (!cef)
2861 return 0;
2862 }
2863
2864 /* Now see if all the arguments are the same. operand_equal_p
2865 does not handle TREE_LIST, so we walk the operands here
2866 feeding them to operand_equal_p. */
2867 arg0 = TREE_OPERAND (arg0, 1);
2868 arg1 = TREE_OPERAND (arg1, 1);
2869 while (arg0 && arg1)
2870 {
2871 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2872 flags))
2873 return 0;
2874
2875 arg0 = TREE_CHAIN (arg0);
2876 arg1 = TREE_CHAIN (arg1);
2877 }
2878
2879 /* If we get here and both argument lists are exhausted
2880 then the CALL_EXPRs are equal. */
2881 return ! (arg0 || arg1);
2882
2883 default:
2884 return 0;
2885 }
2886
2887 case tcc_declaration:
2888 /* Consider __builtin_sqrt equal to sqrt. */
2889 return (TREE_CODE (arg0) == FUNCTION_DECL
2890 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2891 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2892 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893
2894 default:
2895 return 0;
2896 }
2897
2898#undef OP_SAME
2899#undef OP_SAME_WITH_NULL
2900}
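
/* For example, operand_equal_p considers a * b and b * a equal through
   the commutativity check above, treats two identical string literals
   as equal, and always answers 0 for operands with side effects such as
   calls to non-const, non-pure functions.  */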
2901
2902/* Similar to operand_equal_p, but see if ARG0 might have been made by
2903 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904
2905 When in doubt, return 0. */
2906
2907static int
2908operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909{
2910 int unsignedp1, unsignedpo;
2911 tree primarg0, primarg1, primother;
2912 unsigned int correct_width;
2913
2914 if (operand_equal_p (arg0, arg1, 0))
2915 return 1;
2916
2917 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2918 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2919 return 0;
2920
2921 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2922 and see if the inner values are the same. This removes any
2923 signedness comparison, which doesn't matter here. */
2924 primarg0 = arg0, primarg1 = arg1;
2925 STRIP_NOPS (primarg0);
2926 STRIP_NOPS (primarg1);
2927 if (operand_equal_p (primarg0, primarg1, 0))
2928 return 1;
2929
2930 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2931 actual comparison operand, ARG0.
2932
2933 First throw away any conversions to wider types
2934 already present in the operands. */
2935
2936 primarg1 = get_narrower (arg1, &unsignedp1);
2937 primother = get_narrower (other, &unsignedpo);
2938
2939 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2940 if (unsignedp1 == unsignedpo
2941 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2942 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943 {
2944 tree type = TREE_TYPE (arg0);
2945
2946 /* Make sure shorter operand is extended the right way
2947 to match the longer operand. */
2948 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2949 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950
2951 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2952 return 1;
2953 }
2954
2955 return 0;
2956}
2957
2958/* See if ARG is an expression that is either a comparison or is performing
2959 arithmetic on comparisons. The comparisons must only be comparing
2960 two different values, which will be stored in *CVAL1 and *CVAL2; if
2961 they are nonzero it means that some operands have already been found.
2962 No variables may be used anywhere else in the expression except in the
2963 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2964 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965
2966 If this is true, return 1. Otherwise, return zero. */
2967
2968static int
2969twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970{
2971 enum tree_code code = TREE_CODE (arg);
2972 enum tree_code_class class = TREE_CODE_CLASS (code);
2973
2974 /* We can handle some of the tcc_expression cases here. */
2975 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2976 class = tcc_unary;
2977 else if (class == tcc_expression
2978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2979 || code == COMPOUND_EXPR))
2980 class = tcc_binary;
2981
2982 else if (class == tcc_expression && code == SAVE_EXPR
2983 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984 {
2985 /* If we've already found a CVAL1 or CVAL2, this expression is
2986 too complex to handle. */
2987 if (*cval1 || *cval2)
2988 return 0;
2989
2990 class = tcc_unary;
2991 *save_p = 1;
2992 }
2993
2994 switch (class)
2995 {
2996 case tcc_unary:
2997 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998
2999 case tcc_binary:
3000 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3001 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3002 cval1, cval2, save_p));
3003
3004 case tcc_constant:
3005 return 1;
3006
3007 case tcc_expression:
3008 if (code == COND_EXPR)
3009 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3010 cval1, cval2, save_p)
3011 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3012 cval1, cval2, save_p)
3013 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3014 cval1, cval2, save_p));
3015 return 0;
3016
3017 case tcc_comparison:
3018 /* First see if we can handle the first operand, then the second. For
3019 the second operand, we know *CVAL1 can't be zero. It must be that
3020 one side of the comparison is each of the values; test for the
3021 case where this isn't true by failing if the two operands
3022 are the same. */
3023
3024 if (operand_equal_p (TREE_OPERAND (arg, 0),
3025 TREE_OPERAND (arg, 1), 0))
3026 return 0;
3027
3028 if (*cval1 == 0)
3029 *cval1 = TREE_OPERAND (arg, 0);
3030 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3031 ;
3032 else if (*cval2 == 0)
3033 *cval2 = TREE_OPERAND (arg, 0);
3034 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3035 ;
3036 else
3037 return 0;
3038
3039 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3040 ;
3041 else if (*cval2 == 0)
3042 *cval2 = TREE_OPERAND (arg, 1);
3043 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3044 ;
3045 else
3046 return 0;
3047
3048 return 1;
3049
3050 default:
3051 return 0;
3052 }
3053}
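
/* For example, for ARG == (a < b || a == b), *CVAL1 becomes a, *CVAL2
   becomes b and the function returns 1; for (a < b || c == d) it
   returns 0, since the comparisons mention more than two distinct
   values.  */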
3054
3055/* ARG is a tree that is known to contain just arithmetic operations and
3056 comparisons. Evaluate the operations in the tree substituting NEW0 for
3057 any occurrence of OLD0 as an operand of a comparison and likewise for
3058 NEW1 and OLD1. */
3059
3060static tree
3061eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062{
3063 tree type = TREE_TYPE (arg);
3064 enum tree_code code = TREE_CODE (arg);
3065 enum tree_code_class class = TREE_CODE_CLASS (code);
3066
3067 /* We can handle some of the tcc_expression cases here. */
3068 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3069 class = tcc_unary;
3070 else if (class == tcc_expression
3071 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3072 class = tcc_binary;
3073
3074 switch (class)
3075 {
3076 case tcc_unary:
3077 return fold_build1 (code, type,
3078 eval_subst (TREE_OPERAND (arg, 0),
3079 old0, new0, old1, new1));
3080
3081 case tcc_binary:
3082 return fold_build2 (code, type,
3083 eval_subst (TREE_OPERAND (arg, 0),
3084 old0, new0, old1, new1),
3085 eval_subst (TREE_OPERAND (arg, 1),
3086 old0, new0, old1, new1));
3087
3088 case tcc_expression:
3089 switch (code)
3090 {
3091 case SAVE_EXPR:
3092 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093
3094 case COMPOUND_EXPR:
3095 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096
3097 case COND_EXPR:
3098 return fold_build3 (code, type,
3099 eval_subst (TREE_OPERAND (arg, 0),
3100 old0, new0, old1, new1),
3101 eval_subst (TREE_OPERAND (arg, 1),
3102 old0, new0, old1, new1),
3103 eval_subst (TREE_OPERAND (arg, 2),
3104 old0, new0, old1, new1));
3105 default:
3106 break;
3107 }
3108 /* Fall through - ??? */
3109
3110 case tcc_comparison:
3111 {
3112 tree arg0 = TREE_OPERAND (arg, 0);
3113 tree arg1 = TREE_OPERAND (arg, 1);
3114
3115 /* We need to check both for exact equality and tree equality. The
3116 former will be true if the operand has a side-effect. In that
3117 case, we know the operand occurred exactly once. */
3118
3119 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3120 arg0 = new0;
3121 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3122 arg0 = new1;
3123
3124 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3125 arg1 = new0;
3126 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3127 arg1 = new1;
3128
3129 return fold_build2 (code, type, arg0, arg1);
3130 }
3131
3132 default:
3133 return arg;
3134 }
3135}
3136
3137/* Return a tree for the case when the result of an expression is RESULT
3138 converted to TYPE and OMITTED was previously an operand of the expression
3139 but is now not needed (e.g., we folded OMITTED * 0).
3140
3141 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3142 the conversion of RESULT to TYPE. */
3143
3144tree
3145omit_one_operand (tree type, tree result, tree omitted)
3146{
3147 tree t = fold_convert (type, result);
3148
3149 if (TREE_SIDE_EFFECTS (omitted))
3150 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151
3152 return non_lvalue (t);
3153}
3154
3155/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3156
3157static tree
3158pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159{
3160 tree t = fold_convert (type, result);
3161
3162 if (TREE_SIDE_EFFECTS (omitted))
3163 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164
3165 return pedantic_non_lvalue (t);
3166}
3167
3168/* Return a tree for the case when the result of an expression is RESULT
3169 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3170 of the expression but are now not needed.
3171
3172 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3173 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3174 evaluated before OMITTED2. Otherwise, if neither has side effects,
3175 just do the conversion of RESULT to TYPE. */
3176
3177tree
3178omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179{
3180 tree t = fold_convert (type, result);
3181
3182 if (TREE_SIDE_EFFECTS (omitted2))
3183 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3184 if (TREE_SIDE_EFFECTS (omitted1))
3185 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186
3187 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3188}
3189
3190
3191/* Return a simplified tree node for the truth-negation of ARG. This
3192 never alters ARG itself. We assume that ARG is an operation that
3193 returns a truth value (0 or 1).
3194
3195 FIXME: one would think we would fold the result, but it causes
3196 problems with the dominator optimizer. */
3197
3198tree
3199fold_truth_not_expr (tree arg)
3200{
3201 tree type = TREE_TYPE (arg);
3202 enum tree_code code = TREE_CODE (arg);
3203
3204 /* If this is a comparison, we can simply invert it, except for
3205 floating-point non-equality comparisons, in which case we just
3206 enclose a TRUTH_NOT_EXPR around what we have. */
3207
3208 if (TREE_CODE_CLASS (code) == tcc_comparison)
3209 {
3210 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3211 if (FLOAT_TYPE_P (op_type)
3212 && flag_trapping_math
3213 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3214 && code != NE_EXPR && code != EQ_EXPR)
3215 return NULL_TREE;
3216 else
3217 {
3218 code = invert_tree_comparison (code,
3219 HONOR_NANS (TYPE_MODE (op_type)));
3220 if (code == ERROR_MARK)
3221 return NULL_TREE;
3222 else
3223 return build2 (code, type,
3224 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3225 }
3226 }
3227
3228 switch (code)
3229 {
3230 case INTEGER_CST:
3231 return constant_boolean_node (integer_zerop (arg), type);
3232
3233 case TRUTH_AND_EXPR:
3234 return build2 (TRUTH_OR_EXPR, type,
3235 invert_truthvalue (TREE_OPERAND (arg, 0)),
3236 invert_truthvalue (TREE_OPERAND (arg, 1)));
3237
3238 case TRUTH_OR_EXPR:
3239 return build2 (TRUTH_AND_EXPR, type,
3240 invert_truthvalue (TREE_OPERAND (arg, 0)),
3241 invert_truthvalue (TREE_OPERAND (arg, 1)));
3242
3243 case TRUTH_XOR_EXPR:
3244 /* Here we can invert either operand. We invert the first operand
3245 unless the second operand is a TRUTH_NOT_EXPR in which case our
3246 result is the XOR of the first operand with the inside of the
3247 negation of the second operand. */
3248
3249 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3250 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3251 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3252 else
3253 return build2 (TRUTH_XOR_EXPR, type,
3254 invert_truthvalue (TREE_OPERAND (arg, 0)),
3255 TREE_OPERAND (arg, 1));
3256
3257 case TRUTH_ANDIF_EXPR:
3258 return build2 (TRUTH_ORIF_EXPR, type,
3259 invert_truthvalue (TREE_OPERAND (arg, 0)),
3260 invert_truthvalue (TREE_OPERAND (arg, 1)));
3261
3262 case TRUTH_ORIF_EXPR:
3263 return build2 (TRUTH_ANDIF_EXPR, type,
3264 invert_truthvalue (TREE_OPERAND (arg, 0)),
3265 invert_truthvalue (TREE_OPERAND (arg, 1)));
3266
3267 case TRUTH_NOT_EXPR:
3268 return TREE_OPERAND (arg, 0);
3269
3270 case COND_EXPR:
3271 {
3272 tree arg1 = TREE_OPERAND (arg, 1);
3273 tree arg2 = TREE_OPERAND (arg, 2);
3274 /* A COND_EXPR may have a throw as one operand, which
3275 then has void type. Just leave void operands
3276 as they are. */
3277 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3278 VOID_TYPE_P (TREE_TYPE (arg1))
3279 ? arg1 : invert_truthvalue (arg1),
3280 VOID_TYPE_P (TREE_TYPE (arg2))
3281 ? arg2 : invert_truthvalue (arg2));
3282 }
3283
3284 case COMPOUND_EXPR:
3285 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3286 invert_truthvalue (TREE_OPERAND (arg, 1)));
3287
3288 case NON_LVALUE_EXPR:
3289 return invert_truthvalue (TREE_OPERAND (arg, 0));
3290
3291 case NOP_EXPR:
3292 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3293 return build1 (TRUTH_NOT_EXPR, type, arg);
3294
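      /* A non-boolean NOP_EXPR falls through and is handled like
	 CONVERT_EXPR below.  */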
3295 case CONVERT_EXPR:
3296 case FLOAT_EXPR:
3297 return build1 (TREE_CODE (arg), type,
3298 invert_truthvalue (TREE_OPERAND (arg, 0)));
3299
3300 case BIT_AND_EXPR:
3301 if (!integer_onep (TREE_OPERAND (arg, 1)))
3302 break;
3303 return build2 (EQ_EXPR, type, arg,
3304 build_int_cst (type, 0));
3305
3306 case SAVE_EXPR:
3307 return build1 (TRUTH_NOT_EXPR, type, arg);
3308
3309 case CLEANUP_POINT_EXPR:
3310 return build1 (CLEANUP_POINT_EXPR, type,
3311 invert_truthvalue (TREE_OPERAND (arg, 0)));
3312
3313 default:
3314 break;
3315 }
3316
3317 return NULL_TREE;
3318}
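
/* For example, the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above apply
   De Morgan's laws: !(a && b) becomes !a || !b, and !(a || b) becomes
   !a && !b, pushing the negation down to operands that may themselves
   simplify further.  */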
3319
3320/* Return a simplified tree node for the truth-negation of ARG. This
3321 never alters ARG itself. We assume that ARG is an operation that
3322 returns a truth value (0 or 1).
3323
3324 FIXME: one would think we would fold the result, but it causes
3325 problems with the dominator optimizer. */
3326
3327tree
3328invert_truthvalue (tree arg)
3329{
3330 tree tem;
3331
3332 if (TREE_CODE (arg) == ERROR_MARK)
3333 return arg;
3334
3335 tem = fold_truth_not_expr (arg);
3336 if (!tem)
3337 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338
3339 return tem;
3340}
3341
3342/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3343 operands are another bit-wise operation with a common input. If so,
3344 distribute the bit operations to save an operation and possibly two if
3345 constants are involved. For example, convert
3346 (A | B) & (A | C) into A | (B & C)
3347 Further simplification will occur if B and C are constants.
3348
3349 If this optimization cannot be done, 0 will be returned. */
3350
3351static tree
3352distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353{
3354 tree common;
3355 tree left, right;
3356
3357 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3358 || TREE_CODE (arg0) == code
3359 || (TREE_CODE (arg0) != BIT_AND_EXPR
3360 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3361 return 0;
3362
3363 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364 {
3365 common = TREE_OPERAND (arg0, 0);
3366 left = TREE_OPERAND (arg0, 1);
3367 right = TREE_OPERAND (arg1, 1);
3368 }
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370 {
3371 common = TREE_OPERAND (arg0, 0);
3372 left = TREE_OPERAND (arg0, 1);
3373 right = TREE_OPERAND (arg1, 0);
3374 }
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376 {
3377 common = TREE_OPERAND (arg0, 1);
3378 left = TREE_OPERAND (arg0, 0);
3379 right = TREE_OPERAND (arg1, 1);
3380 }
3381 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382 {
3383 common = TREE_OPERAND (arg0, 1);
3384 left = TREE_OPERAND (arg0, 0);
3385 right = TREE_OPERAND (arg1, 0);
3386 }
3387 else
3388 return 0;
3389
3390 return fold_build2 (TREE_CODE (arg0), type, common,
3391 fold_build2 (code, type, left, right));
3392}
3393
3394/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify a
3395 binary operation with code CODE. This optimization is unsafe. */
3396static tree
3397distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398{
3399 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3400 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401
3402 /* (A / C) +- (B / C) -> (A +- B) / C. */
3403 if (mul0 == mul1
3404 && operand_equal_p (TREE_OPERAND (arg0, 1),
3405 TREE_OPERAND (arg1, 1), 0))
3406 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3407 fold_build2 (code, type,
3408 TREE_OPERAND (arg0, 0),
3409 TREE_OPERAND (arg1, 0)),
3410 TREE_OPERAND (arg0, 1));
3411
3412 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3413 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3414 TREE_OPERAND (arg1, 0), 0)
3415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3416 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417 {
3418 REAL_VALUE_TYPE r0, r1;
3419 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3420 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3421 if (!mul0)
3422 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3423 if (!mul1)
3424 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3425 real_arithmetic (&r0, code, &r0, &r1);
3426 return fold_build2 (MULT_EXPR, type,
3427 TREE_OPERAND (arg0, 0),
3428 build_real (type, r0));
3429 }
3430
3431 return NULL_TREE;
3432}
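
/* For example, a/10.0 + b/10.0 becomes (a + b)/10.0, and a/2.0 + a/4.0
   becomes a * 0.75 by folding 1/2.0 + 1/4.0 at compile time; both can
   round differently from the original expression, which is why this is
   documented as unsafe and intended only for unsafe math
   optimizations.  */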
3433
3434/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3435 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3436
3437static tree
3438make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3439 int unsignedp)
3440{
3441 tree result;
3442
3443 if (bitpos == 0)
3444 {
3445 tree size = TYPE_SIZE (TREE_TYPE (inner));
3446 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3447 || POINTER_TYPE_P (TREE_TYPE (inner)))
3448 && host_integerp (size, 0)
3449 && tree_low_cst (size, 0) == bitsize)
3450 return fold_convert (type, inner);
3451 }
3452
3453 result = build3 (BIT_FIELD_REF, type, inner,
3454 size_int (bitsize), bitsize_int (bitpos));
3455
3456 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457
3458 return result;
3459}
3460
3461/* Optimize a bit-field compare.
3462
3463 There are two cases: First is a compare against a constant and the
3464 second is a comparison of two items where the fields are at the same
3465 bit position relative to the start of a chunk (byte, halfword, word)
3466 large enough to contain it. In these cases we can avoid the shift
3467 implicit in bitfield extractions.
3468
3469 For constants, we emit a compare of the shifted constant with the
3470 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471 compared. For two fields at the same position, we do the ANDs with the
3472 similar mask and compare the result of the ANDs.
3473
3474 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476 are the left and right operands of the comparison, respectively.
3477
3478 If the optimization described above can be done, we return the resulting
3479 tree. Otherwise we return zero. */
3480
3481static tree
3482optimize_bit_field_compare (enum tree_code code, tree compare_type,
3483 tree lhs, tree rhs)
3484{
3485 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486 tree type = TREE_TYPE (lhs);
3487 tree signed_type, unsigned_type;
3488 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489 enum machine_mode lmode, rmode, nmode;
3490 int lunsignedp, runsignedp;
3491 int lvolatilep = 0, rvolatilep = 0;
3492 tree linner, rinner = NULL_TREE;
3493 tree mask;
3494 tree offset;
3495
3496 /* Get all the information about the extractions being done. If the bit size
3497 is the same as the size of the underlying object, we aren't doing an
3498 extraction at all and so can do nothing. We also don't want to
3499 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500 then will no longer be able to replace it. */
3501 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 &lunsignedp, &lvolatilep, false);
3503 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3505 return 0;
3506
3507 if (!const_p)
3508 {
3509 /* If this is not a constant, we can only do something if bit positions,
3510 sizes, and signedness are the same. */
3511 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 &runsignedp, &rvolatilep, false);
3513
3514 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 || lunsignedp != runsignedp || offset != 0
3516 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3517 return 0;
3518 }
3519
3520 /* See if we can find a mode to refer to this field. We should be able to,
3521 but fail if we can't. */
3522 nmode = get_best_mode (lbitsize, lbitpos,
3523 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 TYPE_ALIGN (TREE_TYPE (rinner))),
3526 word_mode, lvolatilep || rvolatilep);
3527 if (nmode == VOIDmode)
3528 return 0;
3529
3530 /* Set signed and unsigned types of the precision of this mode for the
3531 shifts below. */
3532 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534
3535 /* Compute the bit position and size for the new reference and our offset
3536 within it. If the new reference is the same size as the original, we
3537 won't optimize anything, so return zero. */
3538 nbitsize = GET_MODE_BITSIZE (nmode);
3539 nbitpos = lbitpos & ~ (nbitsize - 1);
3540 lbitpos -= nbitpos;
3541 if (nbitsize == lbitsize)
3542 return 0;
3543
3544 if (BYTES_BIG_ENDIAN)
3545 lbitpos = nbitsize - lbitsize - lbitpos;
3546
3547 /* Make the mask to be used against the extracted field. */
3548 mask = build_int_cst (unsigned_type, -1);
3549 mask = force_fit_type (mask, 0, false, false);
3550 mask = fold_convert (unsigned_type, mask);
3551 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3552 mask = const_binop (RSHIFT_EXPR, mask,
3553 size_int (nbitsize - lbitsize - lbitpos), 0);
3554
3555 if (! const_p)
3556 /* If not comparing with constant, just rework the comparison
3557 and return. */
3558 return build2 (code, compare_type,
3559 build2 (BIT_AND_EXPR, unsigned_type,
3560 make_bit_field_ref (linner, unsigned_type,
3561 nbitsize, nbitpos, 1),
3562 mask),
3563 build2 (BIT_AND_EXPR, unsigned_type,
3564 make_bit_field_ref (rinner, unsigned_type,
3565 nbitsize, nbitpos, 1),
3566 mask));
3567
3568 /* Otherwise, we are handling the constant case. See if the constant is too
3569 big for the field. Warn and return a tree for 0 (false) if so. We do
3570 this not only for its own sake, but to avoid having to test for this
3571 error case below. If we didn't, we might generate wrong code.
3572
3573 For unsigned fields, the constant shifted right by the field length should
3574 be all zero. For signed fields, the high-order bits should agree with
3575 the sign bit. */
3576
3577 if (lunsignedp)
3578 {
3579 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580 fold_convert (unsigned_type, rhs),
3581 size_int (lbitsize), 0)))
3582 {
3583 warning (0, "comparison is always %d due to width of bit-field",
3584 code == NE_EXPR);
3585 return constant_boolean_node (code == NE_EXPR, compare_type);
3586 }
3587 }
3588 else
3589 {
3590 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3591 size_int (lbitsize - 1), 0);
3592 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593 {
3594 warning (0, "comparison is always %d due to width of bit-field",
3595 code == NE_EXPR);
3596 return constant_boolean_node (code == NE_EXPR, compare_type);
3597 }
3598 }
3599
3600 /* Single-bit compares should always be against zero. */
3601 if (lbitsize == 1 && ! integer_zerop (rhs))
3602 {
3603 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3604 rhs = build_int_cst (type, 0);
3605 }
3606
3607 /* Make a new bitfield reference, shift the constant over the
3608 appropriate number of bits and mask it with the computed mask
3609 (in case this was a signed field). If we changed it, make a new one. */
3610 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3611 if (lvolatilep)
3612 {
3613 TREE_SIDE_EFFECTS (lhs) = 1;
3614 TREE_THIS_VOLATILE (lhs) = 1;
3615 }
3616
3617 rhs = const_binop (BIT_AND_EXPR,
3618 const_binop (LSHIFT_EXPR,
3619 fold_convert (unsigned_type, rhs),
3620 size_int (lbitpos), 0),
3621 mask, 0);
3622
3623 return build2 (code, compare_type,
3624 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3625 rhs);
3626}
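
/* For example, given struct S { unsigned f : 3; } *p, the test
   p->f == 5 can be rewritten along the lines of
   (WORD & MASK) == (5 << SHIFT), comparing against the containing
   memory unit directly instead of extracting and shifting the
   bit-field; WORD, MASK and SHIFT here stand for the reference, mask
   and bit position computed above.  */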
3627
3628/* Subroutine for fold_truthop: decode a field reference.
3629
3630 If EXP is a comparison reference, we return the innermost reference.
3631
3632 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3633 set to the starting bit number.
3634
3635 If the innermost field can be completely contained in a mode-sized
3636 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3637
3638 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3639 otherwise it is not changed.
3640
3641 *PUNSIGNEDP is set to the signedness of the field.
3642
3643 *PMASK is set to the mask used. This is either contained in a
3644 BIT_AND_EXPR or derived from the width of the field.
3645
3646 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647
3648 Return 0 if this is not a component reference or is one that we can't
3649 do anything with. */
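/* A hedged example (editorial; identifiers hypothetical): for EXP of
the form (unsigned char) (x.f & 0x15), where x.f refers to an 8-bit
field, the innermost reference x is returned, *PBITSIZE is set to 8,
*PAND_MASK to 0x15, and *PMASK to 0x15 AND'ed with the all-ones
field mask 0xff, i.e. 0x15. */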
3650
3651static tree
3652decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3653 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3654 int *punsignedp, int *pvolatilep,
3655 tree *pmask, tree *pand_mask)
3656{
3657 tree outer_type = 0;
3658 tree and_mask = 0;
3659 tree mask, inner, offset;
3660 tree unsigned_type;
3661 unsigned int precision;
3662
3663 /* All the optimizations using this function assume integer fields.
3664 There are problems with FP fields since the type_for_size call
3665 below can fail for, e.g., XFmode. */
3666 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3667 return 0;
3668
3669 /* We are interested in the bare arrangement of bits, so strip everything
3670 that doesn't affect the machine mode. However, record the type of the
3671 outermost expression if it may matter below. */
3672 if (TREE_CODE (exp) == NOP_EXPR
3673 || TREE_CODE (exp) == CONVERT_EXPR
3674 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3675 outer_type = TREE_TYPE (exp);
3676 STRIP_NOPS (exp);
3677
3678 if (TREE_CODE (exp) == BIT_AND_EXPR)
3679 {
3680 and_mask = TREE_OPERAND (exp, 1);
3681 exp = TREE_OPERAND (exp, 0);
3682 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3683 if (TREE_CODE (and_mask) != INTEGER_CST)
3684 return 0;
3685 }
3686
3687 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3688 punsignedp, pvolatilep, false);
3689 if ((inner == exp && and_mask == 0)
3690 || *pbitsize < 0 || offset != 0
3691 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3692 return 0;
3693
3694 /* If the number of bits in the reference is the same as the bitsize of
3695 the outer type, then the outer type gives the signedness. Otherwise
3696 (in case of a small bitfield) the signedness is unchanged. */
3697 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3698 *punsignedp = TYPE_UNSIGNED (outer_type);
3699
3700 /* Compute the mask to access the bitfield. */
3701 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3702 precision = TYPE_PRECISION (unsigned_type);
3703
3704 mask = build_int_cst (unsigned_type, -1);
3705 mask = force_fit_type (mask, 0, false, false);
3706
3707 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3708 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709
3710 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3711 if (and_mask != 0)
3712 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3713 fold_convert (unsigned_type, and_mask), mask);
3714
3715 *pmask = mask;
3716 *pand_mask = and_mask;
3717 return inner;
3718}
3719
3720/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3721 bit positions. */
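/* E.g. (illustrative): with SIZE == 4 the mask 0xf qualifies and 0x17
does not; the check compares MASK against an all-ones constant
shifted left and then back right by PRECISION - SIZE bits. */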
3722
3723static int
3724all_ones_mask_p (tree mask, int size)
3725{
3726 tree type = TREE_TYPE (mask);
3727 unsigned int precision = TYPE_PRECISION (type);
3728 tree tmask;
3729
3730 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3731 tmask = force_fit_type (tmask, 0, false, false);
3732
3733 return
3734 tree_int_cst_equal (mask,
3735 const_binop (RSHIFT_EXPR,
3736 const_binop (LSHIFT_EXPR, tmask,
3737 size_int (precision - size),
3738 0),
3739 size_int (precision - size), 0));
3740}
3741
3742/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3743 represents the sign bit of EXP's type. If EXP represents a sign
3744 or zero extension, also test VAL against the unextended type.
3745 The return value is the (sub)expression whose sign bit is VAL,
3746 or NULL_TREE otherwise. */
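/* A hedged example: on a host with 32-bit int, sign_bit_p (x, C)
returns x when C is 0x80000000; for EXP of the form (int) s with a
16-bit short s, C == 0x8000 also matches, and the unextended
subexpression s is returned. */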
3747
3748static tree
3749sign_bit_p (tree exp, tree val)
3750{
3751 unsigned HOST_WIDE_INT mask_lo, lo;
3752 HOST_WIDE_INT mask_hi, hi;
3753 int width;
3754 tree t;
3755
3756 /* Tree EXP must have an integral type. */
3757 t = TREE_TYPE (exp);
3758 if (! INTEGRAL_TYPE_P (t))
3759 return NULL_TREE;
3760
3761 /* Tree VAL must be an integer constant. */
3762 if (TREE_CODE (val) != INTEGER_CST
3763 || TREE_CONSTANT_OVERFLOW (val))
3764 return NULL_TREE;
3765
3766 width = TYPE_PRECISION (t);
3767 if (width > HOST_BITS_PER_WIDE_INT)
3768 {
3769 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3770 lo = 0;
3771
3772 mask_hi = ((unsigned HOST_WIDE_INT) -1
3773 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3774 mask_lo = -1;
3775 }
3776 else
3777 {
3778 hi = 0;
3779 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780
3781 mask_hi = 0;
3782 mask_lo = ((unsigned HOST_WIDE_INT) -1
3783 >> (HOST_BITS_PER_WIDE_INT - width));
3784 }
3785
3786 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3787 treat VAL as if it were unsigned. */
3788 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3789 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3790 return exp;
3791
3792 /* Handle extension from a narrower type. */
3793 if (TREE_CODE (exp) == NOP_EXPR
3794 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3795 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796
3797 return NULL_TREE;
3798}
3799
3800/* Subroutine for fold_truthop: determine if an operand is simple enough
3801 to be evaluated unconditionally. */
3802
3803static int
3804simple_operand_p (tree exp)
3805{
3806 /* Strip any conversions that don't change the machine mode. */
3807 STRIP_NOPS (exp);
3808
3809 return (CONSTANT_CLASS_P (exp)
3810 || TREE_CODE (exp) == SSA_NAME
3811 || (DECL_P (exp)
3812 && ! TREE_ADDRESSABLE (exp)
3813 && ! TREE_THIS_VOLATILE (exp)
3814 && ! DECL_NONLOCAL (exp)
3815 /* Don't regard global variables as simple. They may be
3816 allocated in ways unknown to the compiler (shared memory,
3817 #pragma weak, etc). */
3818 && ! TREE_PUBLIC (exp)
3819 && ! DECL_EXTERNAL (exp)
3820 /* Loading a static variable is unduly expensive, but global
3821 registers aren't expensive. */
3822 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3823}
3824
3825/* The following functions are subroutines to fold_range_test and allow it to
3826 try to change a logical combination of comparisons into a range test.
3827
3828 For example, both
3829 X == 2 || X == 3 || X == 4 || X == 5
3830 and
3831 X >= 2 && X <= 5
3832 are converted to
3833 (unsigned) (X - 2) <= 3
3834
3835 We describe each set of comparisons as being either inside or outside
3836 a range, using a variable named like IN_P, and then describe the
3837 range with a lower and upper bound. If one of the bounds is omitted,
3838 it represents either the highest or lowest value of the type.
3839
3840 In the comments below, we represent a range by two numbers in brackets
3841 preceded by a "+" to designate being inside that range, or a "-" to
3842 designate being outside that range, so the condition can be inverted by
3843 flipping the prefix. An omitted bound is represented by a "-". For
3844 example, "- [-, 10]" means being outside the range starting at the lowest
3845 possible value and ending at 10, in other words, being greater than 10.
3846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3847 always false.
3848
3849 We set up things so that the missing bounds are handled in a consistent
3850 manner so neither a missing bound nor "true" and "false" need to be
3851 handled using a special case. */
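/* A worked illustration (editorial addition): for

ch >= '0' && ch <= '9'

each comparison contributes to the range + ['0', '9'] on CH; the
merged range is rebuilt as the single test

(unsigned char) (ch - '0') <= 9

(assuming 8-bit chars), replacing two branches with one subtraction
and one unsigned comparison. */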
3852
3853/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3855 and UPPER1_P are nonzero if the respective argument is an upper bound
3856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3857 must be specified for a comparison. ARG1 will be converted to ARG0's
3858 type if both are specified. */
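/* For example (illustrative): range_binop (LT_EXPR, type, c, 0,
NULL_TREE, 1) compares the constant C against an omitted upper
bound, which stands for +infinity, and so yields true. */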
3859
3860static tree
3861range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3862 tree arg1, int upper1_p)
3863{
3864 tree tem;
3865 int result;
3866 int sgn0, sgn1;
3867
3868 /* If neither arg represents infinity, do the normal operation.
3869 Else, if not a comparison, return infinity. Else handle the special
3870 comparison rules. Note that most of the cases below won't occur, but
3871 are handled for consistency. */
3872
3873 if (arg0 != 0 && arg1 != 0)
3874 {
3875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3877 STRIP_NOPS (tem);
3878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3879 }
3880
3881 if (TREE_CODE_CLASS (code) != tcc_comparison)
3882 return 0;
3883
3884 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3885 omitted upper bound, and 0 for a finite value. In real mathematics
3886 open-ended ranges cannot be compared, but this is computer
3887 arithmetic, where numbers are finite. We can therefore substitute
3888 for each missing bound a value Z beyond every representable number,
3889 which permits us to treat such unbounded ranges consistently. */
3890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3892 switch (code)
3893 {
3894 case EQ_EXPR:
3895 result = sgn0 == sgn1;
3896 break;
3897 case NE_EXPR:
3898 result = sgn0 != sgn1;
3899 break;
3900 case LT_EXPR:
3901 result = sgn0 < sgn1;
3902 break;
3903 case LE_EXPR:
3904 result = sgn0 <= sgn1;
3905 break;
3906 case GT_EXPR:
3907 result = sgn0 > sgn1;
3908 break;
3909 case GE_EXPR:
3910 result = sgn0 >= sgn1;
3911 break;
3912 default:
3913 gcc_unreachable ();
3914 }
3915
3916 return constant_boolean_node (result, type);
3917}
3918
3919/* Given EXP, a logical expression, set the range it is testing into
3920 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3921 actually being tested. *PLOW and *PHIGH will be made of the same
3922 type as the returned expression. If EXP is not a comparison, we
3923 will most likely not be returning a useful value and range. Set
3924 *STRICT_OVERFLOW_P to true if the return value is only valid
3925 because signed overflow is undefined; otherwise, do not change
3926 *STRICT_OVERFLOW_P. */
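/* A hedged example: for EXP = "x > 5" with signed x, the GT_EXPR case
below returns x with *PIN_P == 0, *PLOW == NULL (no lower bound) and
*PHIGH == 5, i.e. "x outside [-, 5]". */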
3927
3928static tree
3929make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3930 bool *strict_overflow_p)
3931{
3932 enum tree_code code;
3933 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3934 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3935 int in_p, n_in_p;
3936 tree low, high, n_low, n_high;
3937
3938 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3939 and see if we can refine the range. Some of the cases below may not
3940 happen, but it doesn't seem worth worrying about this. We "continue"
3941 the outer loop when we've changed something; otherwise we "break"
3942 the switch, which will "break" the while. */
3943
3944 in_p = 0;
3945 low = high = build_int_cst (TREE_TYPE (exp), 0);
3946
3947 while (1)
3948 {
3949 code = TREE_CODE (exp);
3950 exp_type = TREE_TYPE (exp);
3951
3952 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953 {
3954 if (TREE_CODE_LENGTH (code) > 0)
3955 arg0 = TREE_OPERAND (exp, 0);
3956 if (TREE_CODE_CLASS (code) == tcc_comparison
3957 || TREE_CODE_CLASS (code) == tcc_unary
3958 || TREE_CODE_CLASS (code) == tcc_binary)
3959 arg0_type = TREE_TYPE (arg0);
3960 if (TREE_CODE_CLASS (code) == tcc_binary
3961 || TREE_CODE_CLASS (code) == tcc_comparison
3962 || (TREE_CODE_CLASS (code) == tcc_expression
3963 && TREE_CODE_LENGTH (code) > 1))
3964 arg1 = TREE_OPERAND (exp, 1);
3965 }
3966
3967 switch (code)
3968 {
3969 case TRUTH_NOT_EXPR:
3970 in_p = ! in_p, exp = arg0;
3971 continue;
3972
3973 case EQ_EXPR: case NE_EXPR:
3974 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 /* We can only do something if the range is testing for zero
3976 and if the second operand is an integer constant. Note that
3977 saying something is "in" the range we make is done by
3978 complementing IN_P, since it is initially set up to describe the
3979 case of being not equal to zero; "out" means leaving it alone. */
3980 if (low == 0 || high == 0
3981 || ! integer_zerop (low) || ! integer_zerop (high)
3982 || TREE_CODE (arg1) != INTEGER_CST)
3983 break;
3984
3985 switch (code)
3986 {
3987 case NE_EXPR: /* - [c, c] */
3988 low = high = arg1;
3989 break;
3990 case EQ_EXPR: /* + [c, c] */
3991 in_p = ! in_p, low = high = arg1;
3992 break;
3993 case GT_EXPR: /* - [-, c] */
3994 low = 0, high = arg1;
3995 break;
3996 case GE_EXPR: /* + [c, -] */
3997 in_p = ! in_p, low = arg1, high = 0;
3998 break;
3999 case LT_EXPR: /* - [c, -] */
4000 low = arg1, high = 0;
4001 break;
4002 case LE_EXPR: /* + [-, c] */
4003 in_p = ! in_p, low = 0, high = arg1;
4004 break;
4005 default:
4006 gcc_unreachable ();
4007 }
4008
4009 /* If this is an unsigned comparison, we also know that EXP is
4010 greater than or equal to zero. We base the range tests we make
4011 on that fact, so we record it here so we can parse existing
4012 range tests. We test arg0_type since often the return type
4013 of, e.g. EQ_EXPR, is boolean. */
4014 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 {
4016 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 in_p, low, high, 1,
4018 build_int_cst (arg0_type, 0),
4019 NULL_TREE))
4020 break;
4021
4022 in_p = n_in_p, low = n_low, high = n_high;
4023
4024 /* If the high bound is missing, but we have a nonzero low
4025 bound, reverse the range so it goes from zero to the low bound
4026 minus 1. */
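/* E.g. (illustrative): the unsigned range + [6, -] made for
"x > 5" becomes - [0, 5] here. */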
4027 if (high == 0 && low && ! integer_zerop (low))
4028 {
4029 in_p = ! in_p;
4030 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 integer_one_node, 0);
4032 low = build_int_cst (arg0_type, 0);
4033 }
4034 }
4035
4036 exp = arg0;
4037 continue;
4038
4039 case NEGATE_EXPR:
4040 /* (-x) IN [a,b] -> x in [-b, -a] */
4041 n_low = range_binop (MINUS_EXPR, exp_type,
4042 build_int_cst (exp_type, 0),
4043 0, high, 1);
4044 n_high = range_binop (MINUS_EXPR, exp_type,
4045 build_int_cst (exp_type, 0),
4046 0, low, 0);
4047 low = n_low, high = n_high;
4048 exp = arg0;
4049 continue;
4050
4051 case BIT_NOT_EXPR:
4052 /* ~ X -> -X - 1 */
4053 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4054 build_int_cst (exp_type, 1));
4055 continue;
4056
4057 case PLUS_EXPR: case MINUS_EXPR:
4058 if (TREE_CODE (arg1) != INTEGER_CST)
4059 break;
4060
4061 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4062 move a constant to the other side. */
4063 if (!TYPE_UNSIGNED (arg0_type)
4064 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065 break;
4066
4067 /* If EXP is signed, any overflow in the computation is undefined,
4068 so we don't worry about it so long as our computations on
4069 the bounds don't overflow. For unsigned, overflow is defined
4070 and this is exactly the right thing. */
4071 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072 arg0_type, low, 0, arg1, 0);
4073 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 arg0_type, high, 1, arg1, 0);
4075 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4076 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4077 break;
4078
4079 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4080 *strict_overflow_p = true;
4081
4082 /* Check for an unsigned range which has wrapped around the maximum
4083 value thus making n_high < n_low, and normalize it. */
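/* E.g. (illustrative): for unsigned char X, "X + 10 in [5, 250]"
gives the raw bounds [251, 240]; since 240 < 251 the range has
wrapped, and it is normalized to "X not in [241, 250]". */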
4084 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085 {
4086 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4087 integer_one_node, 0);
4088 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4089 integer_one_node, 0);
4090
4091 /* If the range is of the form +/- [ x+1, x ], we won't
4092 be able to normalize it. But then, it represents the
4093 whole range or the empty set, so make it
4094 +/- [ -, - ]. */
4095 if (tree_int_cst_equal (n_low, low)
4096 && tree_int_cst_equal (n_high, high))
4097 low = high = 0;
4098 else
4099 in_p = ! in_p;
4100 }
4101 else
4102 low = n_low, high = n_high;
4103
4104 exp = arg0;
4105 continue;
4106
4107 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4108 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4109 break;
4110
4111 if (! INTEGRAL_TYPE_P (arg0_type)
4112 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4113 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4114 break;
4115
4116 n_low = low, n_high = high;
4117
4118 if (n_low != 0)
4119 n_low = fold_convert (arg0_type, n_low);
4120
4121 if (n_high != 0)
4122 n_high = fold_convert (arg0_type, n_high);
4123
4124
4125 /* If we're converting arg0 from an unsigned type to exp's
4126 signed type, we will be doing the comparison as unsigned.
4127 The tests above have already verified that LOW and HIGH
4128 are both positive.
4129
4130 So we have to ensure that we will handle large unsigned
4131 values the same way that the current signed bounds treat
4132 negative values. */
4133
4134 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135 {
4136 tree high_positive;
4137 tree equiv_type = lang_hooks.types.type_for_mode
4138 (TYPE_MODE (arg0_type), 1);
4139
4140 /* A range without an upper bound is, naturally, unbounded.
4141 Since convert would have cropped a very large value, use
4142 the max value for the destination type. */
4143 high_positive
4144 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4145 : TYPE_MAX_VALUE (arg0_type);
4146
4147 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4148 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4149 fold_convert (arg0_type,
4150 high_positive),
4151 fold_convert (arg0_type,
4152 integer_one_node));
4153
4154 /* If the low bound is specified, "and" the range with the
4155 range for which the original unsigned value will be
4156 positive. */
4157 if (low != 0)
4158 {
4159 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4160 1, n_low, n_high, 1,
4161 fold_convert (arg0_type,
4162 integer_zero_node),
4163 high_positive))
4164 break;
4165
4166 in_p = (n_in_p == in_p);
4167 }
4168 else
4169 {
4170 /* Otherwise, "or" the range with the range of the input
4171 that will be interpreted as negative. */
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4173 0, n_low, n_high, 1,
4174 fold_convert (arg0_type,
4175 integer_zero_node),
4176 high_positive))
4177 break;
4178
4179 in_p = (in_p != n_in_p);
4180 }
4181 }
4182
4183 exp = arg0;
4184 low = n_low, high = n_high;
4185 continue;
4186
4187 default:
4188 break;
4189 }
4190
4191 break;
4192 }
4193
4194 /* If EXP is a constant, we can evaluate whether this is true or false. */
4195 if (TREE_CODE (exp) == INTEGER_CST)
4196 {
4197 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4198 exp, 0, low, 0))
4199 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4200 exp, 1, high, 1)));
4201 low = high = 0;
4202 exp = 0;
4203 }
4204
4205 *pin_p = in_p, *plow = low, *phigh = high;
4206 return exp;
4207}
4208
4209/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4210 type, TYPE, return an expression to test if EXP is in (or out of, depending
4211 on IN_P) the range. Return 0 if the test couldn't be created. */
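/* A hedged example: for a signed 32-bit x, a call such as
build_range_check (boolean_type_node, x, 1, build_int_cst (type, 2),
build_int_cst (type, 5)) produces the equivalent of
"(unsigned int) x - 2 <= 3", using the wrap-around subtraction
handled at the end of the function. */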
4212
4213static tree
4214build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215{
4216 tree etype = TREE_TYPE (exp);
4217 tree value;
4218
4219#ifdef HAVE_canonicalize_funcptr_for_compare
4220 /* Disable this optimization for function pointer expressions
4221 on targets that require function pointer canonicalization. */
4222 if (HAVE_canonicalize_funcptr_for_compare
4223 && TREE_CODE (etype) == POINTER_TYPE
4224 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4225 return NULL_TREE;
4226#endif
4227
4228 if (! in_p)
4229 {
4230 value = build_range_check (type, exp, 1, low, high);
4231 if (value != 0)
4232 return invert_truthvalue (value);
4233
4234 return 0;
4235 }
4236
4237 if (low == 0 && high == 0)
4238 return build_int_cst (type, 1);
4239
4240 if (low == 0)
4241 return fold_build2 (LE_EXPR, type, exp,
4242 fold_convert (etype, high));
4243
4244 if (high == 0)
4245 return fold_build2 (GE_EXPR, type, exp,
4246 fold_convert (etype, low));
4247
4248 if (operand_equal_p (low, high, 0))
4249 return fold_build2 (EQ_EXPR, type, exp,
4250 fold_convert (etype, low));
4251
4252 if (integer_zerop (low))
4253 {
4254 if (! TYPE_UNSIGNED (etype))
4255 {
4256 etype = lang_hooks.types.unsigned_type (etype);
4257 high = fold_convert (etype, high);
4258 exp = fold_convert (etype, exp);
4259 }
4260 return build_range_check (type, exp, 1, 0, high);
4261 }
4262
4263 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4264 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265 {
4266 unsigned HOST_WIDE_INT lo;
4267 HOST_WIDE_INT hi;
4268 int prec;
4269
4270 prec = TYPE_PRECISION (etype);
4271 if (prec <= HOST_BITS_PER_WIDE_INT)
4272 {
4273 hi = 0;
4274 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 }
4276 else
4277 {
4278 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4279 lo = (unsigned HOST_WIDE_INT) -1;
4280 }
4281
4282 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 {
4284 if (TYPE_UNSIGNED (etype))
4285 {
4286 etype = lang_hooks.types.signed_type (etype);
4287 exp = fold_convert (etype, exp);
4288 }
4289 return fold_build2 (GT_EXPR, type, exp,
4290 build_int_cst (etype, 0));
4291 }
4292 }
4293
4294 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295 This requires wrap-around arithmetic for the type of the expression. */
4296 switch (TREE_CODE (etype))
4297 {
4298 case INTEGER_TYPE:
4299 /* There is no requirement that LOW be within the range of ETYPE
4300 if the latter is a subtype. It must, however, be within the base
4301 type of ETYPE. So be sure we do the subtraction in that type. */
4302 if (TREE_TYPE (etype))
4303 etype = TREE_TYPE (etype);
4304 break;
4305
4306 case ENUMERAL_TYPE:
4307 case BOOLEAN_TYPE:
4308 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4309 TYPE_UNSIGNED (etype));
4310 break;
4311
4312 default:
4313 break;
4314 }
4315
4316 /* If we don't have wrap-around arithmetic up front, try to force it. */
4317 if (TREE_CODE (etype) == INTEGER_TYPE
4318 && !TYPE_OVERFLOW_WRAPS (etype))
4319 {
4320 tree utype, minv, maxv;
4321
4322 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4323 for the type in question, as we rely on this here. */
4324 utype = lang_hooks.types.unsigned_type (etype);
4325 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4326 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4327 integer_one_node, 1);
4328 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329
4330 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4331 minv, 1, maxv, 1)))
4332 etype = utype;
4333 else
4334 return 0;
4335 }
4336
4337 high = fold_convert (etype, high);
4338 low = fold_convert (etype, low);
4339 exp = fold_convert (etype, exp);
4340
4341 value = const_binop (MINUS_EXPR, high, low, 0);
4342
4343 if (value != 0 && !TREE_OVERFLOW (value))
4344 return build_range_check (type,
4345 fold_build2 (MINUS_EXPR, etype, exp, low),
4346 1, build_int_cst (etype, 0), value);
4347
4348 return 0;
4349}
4350
4351/* Return the predecessor of VAL in its type, handling the infinite case. */
4352
4353static tree
4354range_predecessor (tree val)
4355{
4356 tree type = TREE_TYPE (val);
4357
4358 if (INTEGRAL_TYPE_P (type)
4359 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4360 return 0;
4361 else
4362 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4363}
4364
4365/* Return the successor of VAL in its type, handling the infinite case. */
4366
4367static tree
4368range_successor (tree val)
4369{
4370 tree type = TREE_TYPE (val);
4371
4372 if (INTEGRAL_TYPE_P (type)
4373 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4374 return 0;
4375 else
4376 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4377}
4378
4379/* Given two ranges, see if we can merge them into one. Return 1 if we
4380 can, 0 if we can't. Set the output range into the specified parameters. */
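/* For instance (illustrative): merging + [2, 9] with + [4, 12] (an AND
of two "in" tests) gives + [4, 9], while merging the disjoint
+ [2, 3] with + [6, 9] gives the always-false range - [-, -]. */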
4381
4382static int
4383merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384 tree high0, int in1_p, tree low1, tree high1)
4385{
4386 int no_overlap;
4387 int subset;
4388 int temp;
4389 tree tem;
4390 int in_p;
4391 tree low, high;
4392 int lowequal = ((low0 == 0 && low1 == 0)
4393 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 low0, 0, low1, 0)));
4395 int highequal = ((high0 == 0 && high1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 high0, 1, high1, 1)));
4398
4399 /* Make range 0 be the range that starts first, or ends last if they
4400 start at the same value. Swap them if that is not the case. */
4401 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 low0, 0, low1, 0))
4403 || (lowequal
4404 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 high1, 1, high0, 1))))
4406 {
4407 temp = in0_p, in0_p = in1_p, in1_p = temp;
4408 tem = low0, low0 = low1, low1 = tem;
4409 tem = high0, high0 = high1, high1 = tem;
4410 }
4411
4412 /* Now flag two cases, whether the ranges are disjoint or whether the
4413 second range is totally subsumed in the first. Note that the tests
4414 below are simplified by the ones above. */
4415 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416 high0, 1, low1, 0));
4417 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418 high1, 1, high0, 1));
4419
4420 /* We now have four cases, depending on whether we are including or
4421 excluding the two ranges. */
4422 if (in0_p && in1_p)
4423 {
4424 /* If they don't overlap, the result is false. If the second range
4425 is a subset it is the result. Otherwise, the range is from the start
4426 of the second to the end of the first. */
4427 if (no_overlap)
4428 in_p = 0, low = high = 0;
4429 else if (subset)
4430 in_p = 1, low = low1, high = high1;
4431 else
4432 in_p = 1, low = low1, high = high0;
4433 }
4434
4435 else if (in0_p && ! in1_p)
4436 {
4437 /* If they don't overlap, the result is the first range. If they are
4438 equal, the result is false. If the second range is a subset of the
4439 first, and the ranges begin at the same place, we go from just after
4440 the end of the second range to the end of the first. If the second
4441 range is not a subset of the first, or if it is a subset and both
4442 ranges end at the same place, the range starts at the start of the
4443 first range and ends just before the second range.
4444 Otherwise, we can't describe this as a single range. */
4445 if (no_overlap)
4446 in_p = 1, low = low0, high = high0;
4447 else if (lowequal && highequal)
4448 in_p = 0, low = high = 0;
4449 else if (subset && lowequal)
4450 {
4451 low = range_successor (high1);
4452 high = high0;
4453 in_p = 1;
4454 if (low == 0)
4455 {
4456 /* We are in the weird situation where high0 > high1 but
4457 high1 has no successor. Punt. */
4458 return 0;
4459 }
4460 }
4461 else if (! subset || highequal)
4462 {
4463 low = low0;
4464 high = range_predecessor (low1);
4465 in_p = 1;
4466 if (high == 0)
4467 {
4468 /* low0 < low1 but low1 has no predecessor. Punt. */
4469 return 0;
4470 }
4471 }
4472 else
4473 return 0;
4474 }
4475
4476 else if (! in0_p && in1_p)
4477 {
4478 /* If they don't overlap, the result is the second range. If the second
4479 is a subset of the first, the result is false. Otherwise,
4480 the range starts just after the first range and ends at the
4481 end of the second. */
4482 if (no_overlap)
4483 in_p = 1, low = low1, high = high1;
4484 else if (subset || highequal)
4485 in_p = 0, low = high = 0;
4486 else
4487 {
4488 low = range_successor (high0);
4489 high = high1;
4490 in_p = 1;
4491 if (low == 0)
4492 {
4493 /* high1 > high0 but high0 has no successor. Punt. */
4494 return 0;
4495 }
4496 }
4497 }
4498
4499 else
4500 {
4501 /* The case where we are excluding both ranges. Here the complex case
4502 is if they don't overlap. In that case, the only time we have a
4503 range is if they are adjacent. If the second is a subset of the
4504 first, the result is the first. Otherwise, the range to exclude
4505 starts at the beginning of the first range and ends at the end of the
4506 second. */
4507 if (no_overlap)
4508 {
4509 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510 range_successor (high0),
4511 1, low1, 0)))
4512 in_p = 0, low = low0, high = high1;
4513 else
4514 {
4515 /* Canonicalize - [min, x] into - [-, x]. */
4516 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4517 switch (TREE_CODE (TREE_TYPE (low0)))
4518 {
4519 case ENUMERAL_TYPE:
4520 if (TYPE_PRECISION (TREE_TYPE (low0))
4521 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4522 break;
4523 /* FALLTHROUGH */
4524 case INTEGER_TYPE:
4525 if (tree_int_cst_equal (low0,
4526 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4527 low0 = 0;
4528 break;
4529 case POINTER_TYPE:
4530 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4531 && integer_zerop (low0))
4532 low0 = 0;
4533 break;
4534 default:
4535 break;
4536 }
4537
4538 /* Canonicalize - [x, max] into - [x, -]. */
4539 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4540 switch (TREE_CODE (TREE_TYPE (high1)))
4541 {
4542 case ENUMERAL_TYPE:
4543 if (TYPE_PRECISION (TREE_TYPE (high1))
4544 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4545 break;
4546 /* FALLTHROUGH */
4547 case INTEGER_TYPE:
4548 if (tree_int_cst_equal (high1,
4549 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4550 high1 = 0;
4551 break;
4552 case POINTER_TYPE:
4553 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4554 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555 high1, 1,
4556 integer_one_node, 1)))
4557 high1 = 0;
4558 break;
4559 default:
4560 break;
4561 }
4562
4563 /* The ranges might also be adjacent between the maximum and
4564 minimum values of the given type. For
4565 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 return + [x + 1, y - 1]. */
4567 if (low0 == 0 && high1 == 0)
4568 {
4569 low = range_successor (high0);
4570 high = range_predecessor (low1);
4571 if (low == 0 || high == 0)
4572 return 0;
4573
4574 in_p = 1;
4575 }
4576 else
4577 return 0;
4578 }
4579 }
4580 else if (subset)
4581 in_p = 0, low = low0, high = high0;
4582 else
4583 in_p = 0, low = low0, high = high1;
4584 }
4585
4586 *pin_p = in_p, *plow = low, *phigh = high;
4587 return 1;
4588}
4589
4590
4591/* Subroutine of fold, looking inside expressions of the form
4592 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593 of the COND_EXPR. This function is also used to optimize
4594 A op B ? C : A, by reversing the comparison first.
4595
4596 Return a folded expression whose code is not a COND_EXPR
4597 anymore, or NULL_TREE if no folding opportunity is found. */
4598
4599static tree
4600fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601{
4602 enum tree_code comp_code = TREE_CODE (arg0);
4603 tree arg00 = TREE_OPERAND (arg0, 0);
4604 tree arg01 = TREE_OPERAND (arg0, 1);
4605 tree arg1_type = TREE_TYPE (arg1);
4606 tree tem;
4607
4608 STRIP_NOPS (arg1);
4609 STRIP_NOPS (arg2);
4610
4611 /* If we have A op 0 ? A : -A, consider applying the following
4612 transformations:
4613
4614 A == 0? A : -A same as -A
4615 A != 0? A : -A same as A
4616 A >= 0? A : -A same as abs (A)
4617 A > 0? A : -A same as abs (A)
4618 A <= 0? A : -A same as -abs (A)
4619 A < 0? A : -A same as -abs (A)
4620
4621 None of these transformations work for modes with signed
4622 zeros. If A is +/-0, the first two transformations will
4623 change the sign of the result (from +0 to -0, or vice
4624 versa). The last four will fix the sign of the result,
4625 even though the original expressions could be positive or
4626 negative, depending on the sign of A.
4627
4628 Note that all these transformations are correct if A is
4629 NaN, since the two alternatives (A and -A) are also NaNs. */
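/* Concretely (illustrative): with A = -0.0, "A == 0 ? A : -A"
evaluates to -0.0, while the simplified form -A is +0.0; hence
the restriction above on signed zeros. */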
4630 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4631 ? real_zerop (arg01)
4632 : integer_zerop (arg01))
4633 && ((TREE_CODE (arg2) == NEGATE_EXPR
4634 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4635 /* In the case that A is of the form X-Y, '-A' (arg2) may
4636 have already been folded to Y-X, check for that. */
4637 || (TREE_CODE (arg1) == MINUS_EXPR
4638 && TREE_CODE (arg2) == MINUS_EXPR
4639 && operand_equal_p (TREE_OPERAND (arg1, 0),
4640 TREE_OPERAND (arg2, 1), 0)
4641 && operand_equal_p (TREE_OPERAND (arg1, 1),
4642 TREE_OPERAND (arg2, 0), 0))))
4643 switch (comp_code)
4644 {
4645 case EQ_EXPR:
4646 case UNEQ_EXPR:
4647 tem = fold_convert (arg1_type, arg1);
4648 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4649 case NE_EXPR:
4650 case LTGT_EXPR:
4651 return pedantic_non_lvalue (fold_convert (type, arg1));
4652 case UNGE_EXPR:
4653 case UNGT_EXPR:
4654 if (flag_trapping_math)
4655 break;
4656 /* Fall through. */
4657 case GE_EXPR:
4658 case GT_EXPR:
4659 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660 arg1 = fold_convert (lang_hooks.types.signed_type
4661 (TREE_TYPE (arg1)), arg1);
4662 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4663 return pedantic_non_lvalue (fold_convert (type, tem));
4664 case UNLE_EXPR:
4665 case UNLT_EXPR:
4666 if (flag_trapping_math)
4667 break;
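/* Fall through. */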
4668 case LE_EXPR:
4669 case LT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert (lang_hooks.types.signed_type
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return negate_expr (fold_convert (type, tem));
4675 default:
4676 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677 break;
4678 }
4679
4680 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4681 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4682 both transformations are correct when A is NaN: A != 0
4683 is then true, and A == 0 is false. */
4684
4685 if (integer_zerop (arg01) && integer_zerop (arg2))
4686 {
4687 if (comp_code == NE_EXPR)
4688 return pedantic_non_lvalue (fold_convert (type, arg1));
4689 else if (comp_code == EQ_EXPR)
4690 return build_int_cst (type, 0);
4691 }
4692
4693 /* Try some transformations of A op B ? A : B.
4694
4695 A == B? A : B same as B
4696 A != B? A : B same as A
4697 A >= B? A : B same as max (A, B)
4698 A > B? A : B same as max (B, A)
4699 A <= B? A : B same as min (A, B)
4700 A < B? A : B same as min (B, A)
4701
4702 As above, these transformations don't work in the presence
4703 of signed zeros. For example, if A and B are zeros of
4704 opposite sign, the first two transformations will change
4705 the sign of the result. In the last four, the original
4706 expressions give different results for (A=+0, B=-0) and
4707 (A=-0, B=+0), but the transformed expressions do not.
4708
4709 The first two transformations are correct if either A or B
4710 is a NaN. In the first transformation, the condition will
4711 be false, and B will indeed be chosen. In the case of the
4712 second transformation, the condition A != B will be true,
4713 and A will be chosen.
4714
4715 The conversions to max() and min() are not correct if B is
4716 a number and A is not. The conditions in the original
4717 expressions will be false, so all four give B. The min()
4718 and max() versions would give a NaN instead. */
4719 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4720 /* Avoid these transformations if the COND_EXPR may be used
4721 as an lvalue in the C++ front-end. PR c++/19199. */
4722 && (in_gimple_form
4723 || (strcmp (lang_hooks.name, "GNU C++") != 0
4724 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4725 || ! maybe_lvalue_p (arg1)
4726 || ! maybe_lvalue_p (arg2)))
4727 {
4728 tree comp_op0 = arg00;
4729 tree comp_op1 = arg01;
4730 tree comp_type = TREE_TYPE (comp_op0);
4731
4732 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4733 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734 {
4735 comp_type = type;
4736 comp_op0 = arg1;
4737 comp_op1 = arg2;
4738 }
4739
4740 switch (comp_code)
4741 {
4742 case EQ_EXPR:
4743 return pedantic_non_lvalue (fold_convert (type, arg2));
4744 case NE_EXPR:
4745 return pedantic_non_lvalue (fold_convert (type, arg1));
4746 case LE_EXPR:
4747 case LT_EXPR:
4748 case UNLE_EXPR:
4749 case UNLT_EXPR:
4750 /* In C++ a ?: expression can be an lvalue, so put the
4751 operand which will be used if they are equal first
4752 so that we can convert this back to the
4753 corresponding COND_EXPR. */
4754 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 {
4756 comp_op0 = fold_convert (comp_type, comp_op0);
4757 comp_op1 = fold_convert (comp_type, comp_op1);
4758 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4759 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4760 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4761 return pedantic_non_lvalue (fold_convert (type, tem));
4762 }
4763 break;
4764 case GE_EXPR:
4765 case GT_EXPR:
4766 case UNGE_EXPR:
4767 case UNGT_EXPR:
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 {
4770 comp_op0 = fold_convert (comp_type, comp_op0);
4771 comp_op1 = fold_convert (comp_type, comp_op1);
4772 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4773 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4775 return pedantic_non_lvalue (fold_convert (type, tem));
4776 }
4777 break;
4778 case UNEQ_EXPR:
4779 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4780 return pedantic_non_lvalue (fold_convert (type, arg2));
4781 break;
4782 case LTGT_EXPR:
4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 return pedantic_non_lvalue (fold_convert (type, arg1));
4785 break;
4786 default:
4787 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4788 break;
4789 }
4790 }
4791
4792 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4793 we might still be able to simplify this. For example,
4794 if C1 is one less or one more than C2, this might have started
4795 out as a MIN or MAX and been transformed by this function.
4796 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4797
4798 if (INTEGRAL_TYPE_P (type)
4799 && TREE_CODE (arg01) == INTEGER_CST
4800 && TREE_CODE (arg2) == INTEGER_CST)
4801 switch (comp_code)
4802 {
4803 case EQ_EXPR:
4804 /* We can replace A with C1 in this case. */
4805 arg1 = fold_convert (type, arg01);
4806 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807
4808 case LT_EXPR:
4809 /* If C1 is C2 + 1, this is min(A, C2). */
4810 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4811 OEP_ONLY_CONST)
4812 && operand_equal_p (arg01,
4813 const_binop (PLUS_EXPR, arg2,
4814 integer_one_node, 0),
4815 OEP_ONLY_CONST))
4816 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4817 type, arg1, arg2));
4818 break;
4819
4820 case LE_EXPR:
4821 /* If C1 is C2 - 1, this is min(A, C2). */
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 integer_one_node, 0),
4827 OEP_ONLY_CONST))
4828 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4829 type, arg1, arg2));
4830 break;
4831
4832 case GT_EXPR:
4833 /* If C1 is C2 - 1, this is max(A, C2). */
4834 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4835 OEP_ONLY_CONST)
4836 && operand_equal_p (arg01,
4837 const_binop (MINUS_EXPR, arg2,
4838 integer_one_node, 0),
4839 OEP_ONLY_CONST))
4840 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4841 type, arg1, arg2));
4842 break;
4843
4844 case GE_EXPR:
4845 /* If C1 is C2 + 1, this is max(A, C2). */
4846 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4847 OEP_ONLY_CONST)
4848 && operand_equal_p (arg01,
4849 const_binop (PLUS_EXPR, arg2,
4850 integer_one_node, 0),
4851 OEP_ONLY_CONST))
4852 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4853 type, arg1, arg2));
4854 break;
4855 case NE_EXPR:
4856 break;
4857 default:
4858 gcc_unreachable ();
4859 }
4860
4861 return NULL_TREE;
4862}
4863
4864
4865
4866#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4867#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4868#endif
4869
4870/* EXP is some logical combination of boolean tests. See if we can
4871 merge it into some range test. Return the new tree if so. */
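/* A worked example (editorial): for "x == 2 || x == 3" the two ranges
are inverted to - [2, 2] and - [3, 3], merged into the adjacent
range - [2, 3], and the final inversion for the OR yields + [2, 3],
i.e. the single test "(unsigned) (x - 2) <= 1". */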
4872
4873static tree
4874fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875{
4876 int or_op = (code == TRUTH_ORIF_EXPR
4877 || code == TRUTH_OR_EXPR);
4878 int in0_p, in1_p, in_p;
4879 tree low0, low1, low, high0, high1, high;
4880 bool strict_overflow_p = false;
4881 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4882 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4883 tree tem;
4884 const char * const warnmsg = G_("assuming signed overflow does not occur "
4885 "when simplifying range test");
4886
4887 /* If this is an OR operation, invert both sides; we will invert
4888 again at the end. */
4889 if (or_op)
4890 in0_p = ! in0_p, in1_p = ! in1_p;
4891
4892 /* If both expressions are the same, if we can merge the ranges, and we
4893 can build the range test, return it or it inverted. If one of the
4894 ranges is always true or always false, consider it to be the same
4895 expression as the other. */
4896 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4897 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4898 in1_p, low1, high1)
4899 && 0 != (tem = (build_range_check (type,
4900 lhs != 0 ? lhs
4901 : rhs != 0 ? rhs : integer_zero_node,
4902 in_p, low, high))))
4903 {
4904 if (strict_overflow_p)
4905 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4906 return or_op ? invert_truthvalue (tem) : tem;
4907 }
4908
4909 /* On machines where the branch cost is expensive, if this is a
4910 short-circuited branch and the underlying object on both sides
4911 is the same, make a non-short-circuit operation. */
4912 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4913 && lhs != 0 && rhs != 0
4914 && (code == TRUTH_ANDIF_EXPR
4915 || code == TRUTH_ORIF_EXPR)
4916 && operand_equal_p (lhs, rhs, 0))
4917 {
4918 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4919 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4920 which cases we can't do this. */
4921 if (simple_operand_p (lhs))
4922 return build2 (code == TRUTH_ANDIF_EXPR
4923 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924 type, op0, op1);
4925
4926 else if (lang_hooks.decls.global_bindings_p () == 0
4927 && ! CONTAINS_PLACEHOLDER_P (lhs))
4928 {
4929 tree common = save_expr (lhs);
4930
4931 if (0 != (lhs = build_range_check (type, common,
4932 or_op ? ! in0_p : in0_p,
4933 low0, high0))
4934 && (0 != (rhs = build_range_check (type, common,
4935 or_op ? ! in1_p : in1_p,
4936 low1, high1))))
4937 {
4938 if (strict_overflow_p)
4939 fold_overflow_warning (warnmsg,
4940 WARN_STRICT_OVERFLOW_COMPARISON);
4941 return build2 (code == TRUTH_ANDIF_EXPR
4942 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4943 type, lhs, rhs);
4944 }
4945 }
4946 }
4947
4948 return 0;
4949}
4950
4951 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4952 P-bit value. Arrange things so the extra bits will be set to zero if and
4953 only if C is sign-extended to its full width. If MASK is nonzero,
4954 it is an INTEGER_CST that should be AND'ed with the extra bits. */
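/* A hedged example: for a 4-bit signed field (P == 4) stored in a
32-bit word, unextend applied to the raw pattern 10 (binary 1010)
produces 0xfffffffa, the value the field compares equal to once
sign-extended; with UNSIGNEDP set, or with P equal to the mode
width, the constant is returned unchanged. */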
4955
4956static tree
4957unextend (tree c, int p, int unsignedp, tree mask)
4958{
4959 tree type = TREE_TYPE (c);
4960 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4961 tree temp;
4962
4963 if (p == modesize || unsignedp)
4964 return c;
4965
4966 /* We work by getting just the sign bit into the low-order bit, then
4967 into the high-order bit, then sign-extend. We then XOR that value
4968 with C. */
4969 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4970 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971
4972 /* We must use a signed type in order to get an arithmetic right shift.
4973 However, we must also avoid introducing accidental overflows, so that
4974 a subsequent call to integer_zerop will work. Hence we must
4975 do the type conversion here. At this point, the constant is either
4976 zero or one, and the conversion to a signed type can never overflow.
4977 We could get an overflow if this conversion is done anywhere else. */
4978 if (TYPE_UNSIGNED (type))
4979 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980
4981 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4982 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4983 if (mask != 0)
4984 temp = const_binop (BIT_AND_EXPR, temp,
4985 fold_convert (TREE_TYPE (c), mask), 0);
4986 /* If necessary, convert the type back to match the type of C. */
4987 if (TYPE_UNSIGNED (type))
4988 temp = fold_convert (type, temp);
4989
4990 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4991}
4992
4993/* Find ways of folding logical expressions of LHS and RHS:
4994 Try to merge two comparisons to the same innermost item.
4995 Look for range tests like "ch >= '0' && ch <= '9'".
4996 Look for combinations of simple terms on machines with expensive branches
4997 and evaluate the RHS unconditionally.
4998
4999 For example, if we have p->a == 2 && p->b == 4 and we can make an
5000 object large enough to span both A and B, we can do this with a comparison
5001 against the object ANDed with a mask.
5002
5003 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5004 operations to do this with one comparison.
5005
5006 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5007 function and the one above.
5008
5009 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5010 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011
5012 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5013 two operands.
5014
5015 We return the simplified tree or 0 if no optimization is possible. */
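/* An end-to-end sketch (editorial; layout hypothetical,
little-endian): given

struct s { unsigned a : 4; unsigned b : 4; } *p;

the test "p->a == 2 && p->b == 4" can fold into one load and
compare of the byte holding both fields, conceptually

(*(unsigned char *) p) == 0x42

with the exact mask and constant depending on the target's byte
and bit ordering. */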
5016
5017static tree
5018fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019{
5020 /* If this is the "or" of two comparisons, we can do something if
5021 the comparisons are NE_EXPR. If this is the "and", we can do something
5022 if the comparisons are EQ_EXPR. I.e.,
5023 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5024
5025 WANTED_CODE is this operation code. For single bit fields, we can
5026 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5027 comparison for one-bit fields. */
5028
5029 enum tree_code wanted_code;
5030 enum tree_code lcode, rcode;
5031 tree ll_arg, lr_arg, rl_arg, rr_arg;
5032 tree ll_inner, lr_inner, rl_inner, rr_inner;
5033 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5034 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5035 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5036 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5037 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5038 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5039 enum machine_mode lnmode, rnmode;
5040 tree ll_mask, lr_mask, rl_mask, rr_mask;
5041 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5042 tree l_const, r_const;
5043 tree lntype, rntype, result;
5044 int first_bit, end_bit;
5045 int volatilep;
5046 tree orig_lhs = lhs, orig_rhs = rhs;
5047 enum tree_code orig_code = code;
5048
5049 /* Start by getting the comparison codes. Fail if anything is volatile.
5050 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5051 it were surrounded with a NE_EXPR. */
5052
5053 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5054 return 0;
5055
5056 lcode = TREE_CODE (lhs);
5057 rcode = TREE_CODE (rhs);
5058
5059 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060 {
5061 lhs = build2 (NE_EXPR, truth_type, lhs,
5062 build_int_cst (TREE_TYPE (lhs), 0));
5063 lcode = NE_EXPR;
5064 }
5065
5066 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067 {
5068 rhs = build2 (NE_EXPR, truth_type, rhs,
5069 build_int_cst (TREE_TYPE (rhs), 0));
5070 rcode = NE_EXPR;
5071 }
5072
5073 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5074 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5075 return 0;
5076
5077 ll_arg = TREE_OPERAND (lhs, 0);
5078 lr_arg = TREE_OPERAND (lhs, 1);
5079 rl_arg = TREE_OPERAND (rhs, 0);
5080 rr_arg = TREE_OPERAND (rhs, 1);
5081
5082 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5083 if (simple_operand_p (ll_arg)
5084 && simple_operand_p (lr_arg))
5085 {
5086 tree result;
5087 if (operand_equal_p (ll_arg, rl_arg, 0)
5088 && operand_equal_p (lr_arg, rr_arg, 0))
5089 {
5090 result = combine_comparisons (code, lcode, rcode,
5091 truth_type, ll_arg, lr_arg);
5092 if (result)
5093 return result;
5094 }
5095 else if (operand_equal_p (ll_arg, rr_arg, 0)
5096 && operand_equal_p (lr_arg, rl_arg, 0))
5097 {
5098 result = combine_comparisons (code, lcode,
5099 swap_tree_comparison (rcode),
5100 truth_type, ll_arg, lr_arg);
5101 if (result)
5102 return result;
5103 }
5104 }
5105
5106 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5107 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108
5109 /* If the RHS can be evaluated unconditionally and its operands are
5110 simple, it wins to evaluate the RHS unconditionally on machines
5111 with expensive branches. In this case, this isn't a comparison
5112 that can be merged. Avoid doing this if the RHS is a floating-point
5113 comparison since those can trap. */
5114
5115 if (BRANCH_COST >= 2
5116 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5117 && simple_operand_p (rl_arg)
5118 && simple_operand_p (rr_arg))
5119 {
5120 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5121 if (code == TRUTH_OR_EXPR
5122 && lcode == NE_EXPR && integer_zerop (lr_arg)
5123 && rcode == NE_EXPR && integer_zerop (rr_arg)
5124 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5125 return build2 (NE_EXPR, truth_type,
5126 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5127 ll_arg, rl_arg),
5128 build_int_cst (TREE_TYPE (ll_arg), 0));
5129
5130 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5131 if (code == TRUTH_AND_EXPR
5132 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5133 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5134 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5135 return build2 (EQ_EXPR, truth_type,
5136 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137 ll_arg, rl_arg),
5138 build_int_cst (TREE_TYPE (ll_arg), 0));
5139
5140 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 {
5142 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5143 return build2 (code, truth_type, lhs, rhs);
5144 return NULL_TREE;
5145 }
5146 }
5147
5148 /* See if the comparisons can be merged. Then get all the parameters for
5149 each side. */
5150
5151 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5152 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5153 return 0;
5154
5155 volatilep = 0;
5156 ll_inner = decode_field_reference (ll_arg,
5157 &ll_bitsize, &ll_bitpos, &ll_mode,
5158 &ll_unsignedp, &volatilep, &ll_mask,
5159 &ll_and_mask);
5160 lr_inner = decode_field_reference (lr_arg,
5161 &lr_bitsize, &lr_bitpos, &lr_mode,
5162 &lr_unsignedp, &volatilep, &lr_mask,
5163 &lr_and_mask);
5164 rl_inner = decode_field_reference (rl_arg,
5165 &rl_bitsize, &rl_bitpos, &rl_mode,
5166 &rl_unsignedp, &volatilep, &rl_mask,
5167 &rl_and_mask);
5168 rr_inner = decode_field_reference (rr_arg,
5169 &rr_bitsize, &rr_bitpos, &rr_mode,
5170 &rr_unsignedp, &volatilep, &rr_mask,
5171 &rr_and_mask);
5172
5173 /* The inner operation on the lhs of each comparison must be the
5174 same if we are to be able to do anything.
5175 Then see if we have constants. If not, the same must be true for
5176 the rhs's. */
5177 if (volatilep || ll_inner == 0 || rl_inner == 0
5178 || ! operand_equal_p (ll_inner, rl_inner, 0))
5179 return 0;
5180
5181 if (TREE_CODE (lr_arg) == INTEGER_CST
5182 && TREE_CODE (rr_arg) == INTEGER_CST)
5183 l_const = lr_arg, r_const = rr_arg;
5184 else if (lr_inner == 0 || rr_inner == 0
5185 || ! operand_equal_p (lr_inner, rr_inner, 0))
5186 return 0;
5187 else
5188 l_const = r_const = 0;
5189
5190 /* If either comparison code is not correct for our logical operation,
5191 fail. However, we can convert a one-bit comparison against zero into
5192 the opposite comparison against that bit being set in the field. */
5193
5194 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5195 if (lcode != wanted_code)
5196 {
5197 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 {
5199 /* Make the left operand unsigned, since we are only interested
5200 in the value of one bit. Otherwise we are doing the wrong
5201 thing below. */
5202 ll_unsignedp = 1;
5203 l_const = ll_mask;
5204 }
5205 else
5206 return 0;
5207 }
5208
5209 /* This is analogous to the code for l_const above. */
5210 if (rcode != wanted_code)
5211 {
5212 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213 {
5214 rl_unsignedp = 1;
5215 r_const = rl_mask;
5216 }
5217 else
5218 return 0;
5219 }
5220
5221 /* After this point all optimizations will generate bit-field
5222 references, which we might not want. */
5223 if (! lang_hooks.can_use_bit_fields_p ())
5224 return 0;
5225
5226 /* See if we can find a mode that contains both fields being compared on
5227 the left. If we can't, fail. Otherwise, update all constants and masks
5228 to be relative to a field of that size. */
5229 first_bit = MIN (ll_bitpos, rl_bitpos);
5230 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5231 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5232 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5233 volatilep);
5234 if (lnmode == VOIDmode)
5235 return 0;
5236
5237 lnbitsize = GET_MODE_BITSIZE (lnmode);
5238 lnbitpos = first_bit & ~ (lnbitsize - 1);
5239 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5240 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241
5242 if (BYTES_BIG_ENDIAN)
5243 {
5244 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5245 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5246 }
5247
5248 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5249 size_int (xll_bitpos), 0);
5250 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5251 size_int (xrl_bitpos), 0);
5252
5253 if (l_const)
5254 {
5255 l_const = fold_convert (lntype, l_const);
5256 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5257 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5258 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5259 fold_build1 (BIT_NOT_EXPR,
5260 lntype, ll_mask),
5261 0)))
5262 {
5263 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264
5265 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5266 }
5267 }
5268 if (r_const)
5269 {
5270 r_const = fold_convert (lntype, r_const);
5271 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5272 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5273 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5274 fold_build1 (BIT_NOT_EXPR,
5275 lntype, rl_mask),
5276 0)))
5277 {
5278 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279
5280 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281 }
5282 }
5283
5284 /* If the right sides are not constant, do the same for them. Also,
5285 disallow this optimization if a size or signedness mismatch occurs
5286 between the left and right sides. */
5287 if (l_const == 0)
5288 {
5289 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5290 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5291 /* Make sure the two fields on the right
5292 correspond to the left without being swapped. */
5293 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5294 return 0;
5295
5296 first_bit = MIN (lr_bitpos, rr_bitpos);
5297 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5298 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5299 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5300 volatilep);
5301 if (rnmode == VOIDmode)
5302 return 0;
5303
5304 rnbitsize = GET_MODE_BITSIZE (rnmode);
5305 rnbitpos = first_bit & ~ (rnbitsize - 1);
5306 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5307 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308
5309 if (BYTES_BIG_ENDIAN)
5310 {
5311 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5312 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5313 }
5314
5315 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5316 size_int (xlr_bitpos), 0);
5317 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5318 size_int (xrr_bitpos), 0);
5319
5320 /* Make a mask that corresponds to both fields being compared.
5321 Do this for both items being compared. If the operands are the
5322 same size and the bits being compared are in the same position
5323 then we can do this by masking both and comparing the masked
5324 results. */
5325 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5326 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5327 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328 {
5329 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5330 ll_unsignedp || rl_unsignedp);
5331 if (! all_ones_mask_p (ll_mask, lnbitsize))
5332 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333
5334 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5335 lr_unsignedp || rr_unsignedp);
5336 if (! all_ones_mask_p (lr_mask, rnbitsize))
5337 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338
5339 return build2 (wanted_code, truth_type, lhs, rhs);
5340 }
5341
5342 /* There is still another way we can do something: If both pairs of
5343 fields being compared are adjacent, we may be able to make a wider
5344 field containing them both.
5345
5346 Note that we still must mask the lhs/rhs expressions. Furthermore,
5347 the mask must be shifted to account for the shift done by
5348 make_bit_field_ref. */
5349 if ((ll_bitsize + ll_bitpos == rl_bitpos
5350 && lr_bitsize + lr_bitpos == rr_bitpos)
5351 || (ll_bitpos == rl_bitpos + rl_bitsize
5352 && lr_bitpos == rr_bitpos + rr_bitsize))
5353 {
5354 tree type;
5355
5356 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5357 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5358 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5359 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360
5361 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5362 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5363 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5364 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365
5366 /* Convert to the smaller type before masking out unwanted bits. */
5367 type = lntype;
5368 if (lntype != rntype)
5369 {
5370 if (lnbitsize > rnbitsize)
5371 {
5372 lhs = fold_convert (rntype, lhs);
5373 ll_mask = fold_convert (rntype, ll_mask);
5374 type = rntype;
5375 }
5376 else if (lnbitsize < rnbitsize)
5377 {
5378 rhs = fold_convert (lntype, rhs);
5379 lr_mask = fold_convert (lntype, lr_mask);
5380 type = lntype;
5381 }
5382 }
5383
5384 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5385 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386
5387 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5388 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389
5390 return build2 (wanted_code, truth_type, lhs, rhs);
5391 }
5392
5393 return 0;
5394 }
5395
5396 /* Handle the case of comparisons with constants. If there is something in
5397 common between the masks, those bits of the constants must be the same.
5398 If not, the condition is always false. Test for this to avoid generating
5399 incorrect code below. */
5400 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5401 if (! integer_zerop (result)
5402 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5403 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404 {
5405 if (wanted_code == NE_EXPR)
5406 {
5407 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5408 return constant_boolean_node (true, truth_type);
5409 }
5410 else
5411 {
5412 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5413 return constant_boolean_node (false, truth_type);
5414 }
5415 }
5416
5417 /* Construct the expression we will return. First get the component
5418     reference we will make.  Unless the mask is all ones for the full width of
5419 that field, perform the mask operation. Then compare with the
5420 merged constant. */
5421 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5422 ll_unsignedp || rl_unsignedp);
5423
5424 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5425 if (! all_ones_mask_p (ll_mask, lnbitsize))
5426 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427
5428 return build2 (wanted_code, truth_type, result,
5429 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5430}
5431
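/* A minimal standalone sketch of the merge performed above, using a
   hypothetical layout (one field in bits 0-7, another in bits 8-15 of a
   32-bit word): the two constant tests collapse into a single
   mask-and-compare against the merged constant.  Kept under #if 0 as an
   illustration only, not part of the folder.  */
#if 0
#include <assert.h>
#include <stdint.h>

#define X_MASK 0x000000ffu   /* hypothetical field X, bits 0-7  */
#define Y_MASK 0x0000ff00u   /* hypothetical field Y, bits 8-15 */

static int
two_tests (uint32_t word)
{
  return (word & X_MASK) == 1 && ((word & Y_MASK) >> 8) == 2;
}

static int
merged_test (uint32_t word)
{
  /* Combined mask, merged constant 1 | (2 << 8).  */
  return (word & (X_MASK | Y_MASK)) == (1u | (2u << 8));
}

int
main (void)
{
  uint32_t w;
  for (w = 0; w < 0x20000u; w++)
    assert (two_tests (w) == merged_test (w));
  return 0;
}
#endif
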
5432/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5433 constant. */
5434
5435static tree
5436optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437{
5438 tree arg0 = op0;
5439 enum tree_code op_code;
5440 tree comp_const = op1;
5441 tree minmax_const;
5442 int consts_equal, consts_lt;
5443 tree inner;
5444
5445 STRIP_SIGN_NOPS (arg0);
5446
5447 op_code = TREE_CODE (arg0);
5448 minmax_const = TREE_OPERAND (arg0, 1);
5449 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5450 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5451 inner = TREE_OPERAND (arg0, 0);
5452
5453  /* If something does not permit us to optimize, return NULL_TREE.  */
5454 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5455 || TREE_CODE (comp_const) != INTEGER_CST
5456 || TREE_CONSTANT_OVERFLOW (comp_const)
5457 || TREE_CODE (minmax_const) != INTEGER_CST
5458 || TREE_CONSTANT_OVERFLOW (minmax_const))
5459 return NULL_TREE;
5460
5461 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5462 and GT_EXPR, doing the rest with recursive calls using logical
5463 simplifications. */
5464 switch (code)
5465 {
5466 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 {
5468 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5469 type, op0, op1);
5470 if (tem)
5471 return invert_truthvalue (tem);
5472 return NULL_TREE;
5473 }
5474
5475 case GE_EXPR:
5476 return
5477 fold_build2 (TRUTH_ORIF_EXPR, type,
5478 optimize_minmax_comparison
5479 (EQ_EXPR, type, arg0, comp_const),
5480 optimize_minmax_comparison
5481 (GT_EXPR, type, arg0, comp_const));
5482
5483 case EQ_EXPR:
5484 if (op_code == MAX_EXPR && consts_equal)
5485 /* MAX (X, 0) == 0 -> X <= 0 */
5486 return fold_build2 (LE_EXPR, type, inner, comp_const);
5487
5488 else if (op_code == MAX_EXPR && consts_lt)
5489 /* MAX (X, 0) == 5 -> X == 5 */
5490 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491
5492 else if (op_code == MAX_EXPR)
5493 /* MAX (X, 0) == -1 -> false */
5494 return omit_one_operand (type, integer_zero_node, inner);
5495
5496 else if (consts_equal)
5497 /* MIN (X, 0) == 0 -> X >= 0 */
5498 return fold_build2 (GE_EXPR, type, inner, comp_const);
5499
5500 else if (consts_lt)
5501 /* MIN (X, 0) == 5 -> false */
5502 return omit_one_operand (type, integer_zero_node, inner);
5503
5504 else
5505 /* MIN (X, 0) == -1 -> X == -1 */
5506 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507
5508 case GT_EXPR:
5509 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510 /* MAX (X, 0) > 0 -> X > 0
5511 MAX (X, 0) > 5 -> X > 5 */
5512 return fold_build2 (GT_EXPR, type, inner, comp_const);
5513
5514 else if (op_code == MAX_EXPR)
5515 /* MAX (X, 0) > -1 -> true */
5516 return omit_one_operand (type, integer_one_node, inner);
5517
5518 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519 /* MIN (X, 0) > 0 -> false
5520 MIN (X, 0) > 5 -> false */
5521 return omit_one_operand (type, integer_zero_node, inner);
5522
5523 else
5524 /* MIN (X, 0) > -1 -> X > -1 */
5525 return fold_build2 (GT_EXPR, type, inner, comp_const);
5526
5527 default:
5528 return NULL_TREE;
5529 }
5530}
5531
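/* A standalone sketch (plain host ints, exhaustive small range) of the
   EQ_EXPR and GT_EXPR identities listed in the comments above; an
   illustration only, not part of the folder.  */
#if 0
#include <assert.h>

static int max_int (int a, int b) { return a > b ? a : b; }

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      assert ((max_int (x, 0) == 0) == (x <= 0));  /* MAX (X, 0) == 0 -> X <= 0 */
      assert ((max_int (x, 0) == 5) == (x == 5));  /* MAX (X, 0) == 5 -> X == 5 */
      assert (max_int (x, 0) > -1);                /* MAX (X, 0) > -1 -> true  */
    }
  return 0;
}
#endif
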
5532/* T is an integer expression that is being multiplied, divided, or taken a
5533 modulus (CODE says which and what kind of divide or modulus) by a
5534 constant C. See if we can eliminate that operation by folding it with
5535 other operations already in T. WIDE_TYPE, if non-null, is a type that
5536 should be used for the computation if wider than our type.
5537
5538 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539 (X * 2) + (Y * 4). We must, however, be assured that either the original
5540 expression would not overflow or that overflow is undefined for the type
5541 in the language in question.
5542
5543 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5544 the machine has a multiply-accumulate insn or that this is part of an
5545 addressing calculation.
5546
5547 If we return a non-null expression, it is an equivalent form of the
5548 original computation, but need not be in the original type.
5549
5550   We set *STRICT_OVERFLOW_P to true if the return value depends on
5551 signed overflow being undefined. Otherwise we do not change
5552 *STRICT_OVERFLOW_P. */
5553
5554static tree
5555extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5556 bool *strict_overflow_p)
5557{
5558 /* To avoid exponential search depth, refuse to allow recursion past
5559 three levels. Beyond that (1) it's highly unlikely that we'll find
5560 something interesting and (2) we've probably processed it before
5561 when we built the inner expression. */
5562
5563 static int depth;
5564 tree ret;
5565
5566 if (depth > 3)
5567 return NULL;
5568
5569 depth++;
5570 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5571 depth--;
5572
5573 return ret;
5574}
5575
5576static tree
5577extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5578 bool *strict_overflow_p)
5579{
5580 tree type = TREE_TYPE (t);
5581 enum tree_code tcode = TREE_CODE (t);
5582 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5583 > GET_MODE_SIZE (TYPE_MODE (type)))
5584 ? wide_type : type);
5585 tree t1, t2;
5586 int same_p = tcode == code;
5587 tree op0 = NULL_TREE, op1 = NULL_TREE;
5588 bool sub_strict_overflow_p;
5589
5590 /* Don't deal with constants of zero here; they confuse the code below. */
5591 if (integer_zerop (c))
5592 return NULL_TREE;
5593
5594 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5595 op0 = TREE_OPERAND (t, 0);
5596
5597 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5598 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599
5600 /* Note that we need not handle conditional operations here since fold
5601 already handles those cases. So just do arithmetic here. */
5602 switch (tcode)
5603 {
5604 case INTEGER_CST:
5605 /* For a constant, we can always simplify if we are a multiply
5606 or (for divide and modulus) if it is a multiple of our constant. */
5607 if (code == MULT_EXPR
5608 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5609 return const_binop (code, fold_convert (ctype, t),
5610 fold_convert (ctype, c), 0);
5611 break;
5612
5613 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5614 /* If op0 is an expression ... */
5615 if ((COMPARISON_CLASS_P (op0)
5616 || UNARY_CLASS_P (op0)
5617 || BINARY_CLASS_P (op0)
5618 || EXPRESSION_CLASS_P (op0))
5619 /* ... and is unsigned, and its type is smaller than ctype,
5620 then we cannot pass through as widening. */
5621 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5622 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5623 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5624 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5625 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5626 /* ... or this is a truncation (t is narrower than op0),
5627 then we cannot pass through this narrowing. */
5628 || (GET_MODE_SIZE (TYPE_MODE (type))
5629 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5630 /* ... or signedness changes for division or modulus,
5631 then we cannot pass through this conversion. */
5632 || (code != MULT_EXPR
5633 && (TYPE_UNSIGNED (ctype)
5634 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5635 break;
5636
5637 /* Pass the constant down and see if we can make a simplification. If
5638 we can, replace this expression with the inner simplification for
5639 possible later conversion to our or some other type. */
5640 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5641 && TREE_CODE (t2) == INTEGER_CST
5642 && ! TREE_CONSTANT_OVERFLOW (t2)
5643 && (0 != (t1 = extract_muldiv (op0, t2, code,
5644 code == MULT_EXPR
5645 ? ctype : NULL_TREE,
5646 strict_overflow_p))))
5647 return t1;
5648 break;
5649
5650 case ABS_EXPR:
5651 /* If widening the type changes it from signed to unsigned, then we
5652 must avoid building ABS_EXPR itself as unsigned. */
5653 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654 {
5655 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5656 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5657 != 0)
5658 {
5659 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5660 return fold_convert (ctype, t1);
5661 }
5662 break;
5663 }
5664 /* If the constant is negative, we cannot simplify this. */
5665 if (tree_int_cst_sgn (c) == -1)
5666 break;
5667 /* FALLTHROUGH */
5668 case NEGATE_EXPR:
5669 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5670 != 0)
5671 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5672 break;
5673
5674 case MIN_EXPR: case MAX_EXPR:
5675 /* If widening the type changes the signedness, then we can't perform
5676 this optimization as that changes the result. */
5677 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5678 break;
5679
5680 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5681 sub_strict_overflow_p = false;
5682 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5683 &sub_strict_overflow_p)) != 0
5684 && (t2 = extract_muldiv (op1, c, code, wide_type,
5685 &sub_strict_overflow_p)) != 0)
5686 {
5687 if (tree_int_cst_sgn (c) < 0)
5688 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5689 if (sub_strict_overflow_p)
5690 *strict_overflow_p = true;
5691 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5692 fold_convert (ctype, t2));
5693 }
5694 break;
5695
5696 case LSHIFT_EXPR: case RSHIFT_EXPR:
5697 /* If the second operand is constant, this is a multiplication
5698	 or floor division by a power of two, so we can treat it that
5699 way unless the multiplier or divisor overflows. Signed
5700 left-shift overflow is implementation-defined rather than
5701 undefined in C90, so do not convert signed left shift into
5702 multiplication. */
5703 if (TREE_CODE (op1) == INTEGER_CST
5704 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5705 /* const_binop may not detect overflow correctly,
5706 so check for it explicitly here. */
5707 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5708 && TREE_INT_CST_HIGH (op1) == 0
5709 && 0 != (t1 = fold_convert (ctype,
5710 const_binop (LSHIFT_EXPR,
5711 size_one_node,
5712 op1, 0)))
5713 && ! TREE_OVERFLOW (t1))
5714 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5715 ? MULT_EXPR : FLOOR_DIV_EXPR,
5716 ctype, fold_convert (ctype, op0), t1),
5717 c, code, wide_type, strict_overflow_p);
5718 break;
5719
5720 case PLUS_EXPR: case MINUS_EXPR:
5721 /* See if we can eliminate the operation on both sides. If we can, we
5722 can return a new PLUS or MINUS. If we can't, the only remaining
5723 cases where we can do anything are if the second operand is a
5724 constant. */
5725 sub_strict_overflow_p = false;
5726 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5727 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5728 if (t1 != 0 && t2 != 0
5729 && (code == MULT_EXPR
5730 /* If not multiplication, we can only do this if both operands
5731 are divisible by c. */
5732 || (multiple_of_p (ctype, op0, c)
5733 && multiple_of_p (ctype, op1, c))))
5734 {
5735 if (sub_strict_overflow_p)
5736 *strict_overflow_p = true;
5737 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5738 fold_convert (ctype, t2));
5739 }
5740
5741 /* If this was a subtraction, negate OP1 and set it to be an addition.
5742 This simplifies the logic below. */
5743 if (tcode == MINUS_EXPR)
5744 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5745
5746 if (TREE_CODE (op1) != INTEGER_CST)
5747 break;
5748
5749 /* If either OP1 or C are negative, this optimization is not safe for
5750 some of the division and remainder types while for others we need
5751 to change the code. */
5752 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5753 {
5754 if (code == CEIL_DIV_EXPR)
5755 code = FLOOR_DIV_EXPR;
5756 else if (code == FLOOR_DIV_EXPR)
5757 code = CEIL_DIV_EXPR;
5758 else if (code != MULT_EXPR
5759 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5760 break;
5761 }
5762
5763 /* If it's a multiply or a division/modulus operation of a multiple
5764 of our constant, do the operation and verify it doesn't overflow. */
5765 if (code == MULT_EXPR
5766 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5767 {
5768 op1 = const_binop (code, fold_convert (ctype, op1),
5769 fold_convert (ctype, c), 0);
5770 /* We allow the constant to overflow with wrapping semantics. */
5771 if (op1 == 0
5772 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5773 break;
5774 }
5775 else
5776 break;
5777
5778      /* If we have an unsigned type that is not a sizetype, we cannot widen
5779 the operation since it will change the result if the original
5780 computation overflowed. */
5781 if (TYPE_UNSIGNED (ctype)
5782 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5783 && ctype != type)
5784 break;
5785
5786 /* If we were able to eliminate our operation from the first side,
5787 apply our operation to the second side and reform the PLUS. */
5788 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5789 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5790
5791 /* The last case is if we are a multiply. In that case, we can
5792 apply the distributive law to commute the multiply and addition
5793 if the multiplication of the constants doesn't overflow. */
5794 if (code == MULT_EXPR)
5795 return fold_build2 (tcode, ctype,
5796 fold_build2 (code, ctype,
5797 fold_convert (ctype, op0),
5798 fold_convert (ctype, c)),
5799 op1);
5800
5801 break;
5802
5803 case MULT_EXPR:
5804 /* We have a special case here if we are doing something like
5805 (C * 8) % 4 since we know that's zero. */
5806 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5807 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5808 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5809 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5810 return omit_one_operand (type, integer_zero_node, op0);
5811
5812 /* ... fall through ... */
5813
5814 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5815 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5816 /* If we can extract our operation from the LHS, do so and return a
5817 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5818 do something only if the second operand is a constant. */
5819 if (same_p
5820 && (t1 = extract_muldiv (op0, c, code, wide_type,
5821 strict_overflow_p)) != 0)
5822 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5823 fold_convert (ctype, op1));
5824 else if (tcode == MULT_EXPR && code == MULT_EXPR
5825 && (t1 = extract_muldiv (op1, c, code, wide_type,
5826 strict_overflow_p)) != 0)
5827 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5828 fold_convert (ctype, t1));
5829 else if (TREE_CODE (op1) != INTEGER_CST)
5830 return 0;
5831
5832 /* If these are the same operation types, we can associate them
5833 assuming no overflow. */
5834 if (tcode == code
5835 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5836 fold_convert (ctype, c), 0))
5837 && ! TREE_OVERFLOW (t1))
5838 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5839
5840 /* If these operations "cancel" each other, we have the main
5841 optimizations of this pass, which occur when either constant is a
5842	 multiple of the other, in which case we replace this with an
5843	 operation of either CODE or TCODE.
5844
5845 If we have an unsigned type that is not a sizetype, we cannot do
5846 this since it will change the result if the original computation
5847 overflowed. */
5848 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5849 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5850 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5851 || (tcode == MULT_EXPR
5852 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5853 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5854 {
5855 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5856 {
5857 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5858 *strict_overflow_p = true;
5859 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5860 fold_convert (ctype,
5861 const_binop (TRUNC_DIV_EXPR,
5862 op1, c, 0)));
5863 }
5864 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5865 {
5866 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5867 *strict_overflow_p = true;
5868 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype,
5870 const_binop (TRUNC_DIV_EXPR,
5871 c, op1, 0)));
5872 }
5873 }
5874 break;
5875
5876 default:
5877 break;
5878 }
5879
5880 return 0;
5881}
5882
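/* A standalone sketch of the example in the header comment above:
   dividing (X * 8) + (Y * 16) by 4 yields (X * 2) + (Y * 4).  The small
   operand range keeps every intermediate free of overflow, which is the
   precondition stated above.  */
#if 0
#include <assert.h>

int
main (void)
{
  long x, y;
  for (x = -50; x <= 50; x++)
    for (y = -50; y <= 50; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif
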
5883/* Return a node which has the indicated constant VALUE (either 0 or
5884 1), and is of the indicated TYPE. */
5885
5886tree
5887constant_boolean_node (int value, tree type)
5888{
5889 if (type == integer_type_node)
5890 return value ? integer_one_node : integer_zero_node;
5891 else if (type == boolean_type_node)
5892 return value ? boolean_true_node : boolean_false_node;
5893 else
5894 return build_int_cst (type, value);
5895}
5896
5897
5898/* Return true if expr looks like an ARRAY_REF and set base and
5899 offset to the appropriate trees. If there is no offset,
5900 offset is set to NULL_TREE. Base will be canonicalized to
5901 something you can get the element type from using
5902 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5903   in bytes relative to the base.  */
5904
5905static bool
5906extract_array_ref (tree expr, tree *base, tree *offset)
5907{
5908 /* One canonical form is a PLUS_EXPR with the first
5909 argument being an ADDR_EXPR with a possible NOP_EXPR
5910 attached. */
5911 if (TREE_CODE (expr) == PLUS_EXPR)
5912 {
5913 tree op0 = TREE_OPERAND (expr, 0);
5914 tree inner_base, dummy1;
5915 /* Strip NOP_EXPRs here because the C frontends and/or
5916	 folders may present us with (int *)&x.a + 4B.  */
5917 STRIP_NOPS (op0);
5918 if (extract_array_ref (op0, &inner_base, &dummy1))
5919 {
5920 *base = inner_base;
5921 if (dummy1 == NULL_TREE)
5922 *offset = TREE_OPERAND (expr, 1);
5923 else
5924 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5925 dummy1, TREE_OPERAND (expr, 1));
5926 return true;
5927 }
5928 }
5929  /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5930 which we transform into an ADDR_EXPR with appropriate
5931 offset. For other arguments to the ADDR_EXPR we assume
5932 zero offset and as such do not care about the ADDR_EXPR
5933 type and strip possible nops from it. */
5934 else if (TREE_CODE (expr) == ADDR_EXPR)
5935 {
5936 tree op0 = TREE_OPERAND (expr, 0);
5937 if (TREE_CODE (op0) == ARRAY_REF)
5938 {
5939 tree idx = TREE_OPERAND (op0, 1);
5940 *base = TREE_OPERAND (op0, 0);
5941 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5942 array_ref_element_size (op0));
5943 }
5944 else
5945 {
5946 /* Handle array-to-pointer decay as &a. */
5947 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5948 *base = TREE_OPERAND (expr, 0);
5949 else
5950 *base = expr;
5951 *offset = NULL_TREE;
5952 }
5953 return true;
5954 }
5955 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5956 else if (SSA_VAR_P (expr)
5957 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5958 {
5959 *base = expr;
5960 *offset = NULL_TREE;
5961 return true;
5962 }
5963
5964 return false;
5965}
5966
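/* A standalone sketch of the base/offset decomposition above, restated
   in plain C pointer arithmetic: for the ARRAY_REF case the byte offset
   is the index times the element size.  An illustration only.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  int a[10];
  size_t i = 3;
  /* base "a", byte offset "i * sizeof (int)", as for &a[i] above.  */
  assert ((char *) &a[i] == (char *) a + i * sizeof (int));
  return 0;
}
#endif
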
5967
5968/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5969   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5970 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5971 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5972 COND is the first argument to CODE; otherwise (as in the example
5973 given here), it is the second argument. TYPE is the type of the
5974 original expression. Return NULL_TREE if no simplification is
5975 possible. */
5976
5977static tree
5978fold_binary_op_with_conditional_arg (enum tree_code code,
5979 tree type, tree op0, tree op1,
5980 tree cond, tree arg, int cond_first_p)
5981{
5982 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984 tree test, true_value, false_value;
5985 tree lhs = NULL_TREE;
5986 tree rhs = NULL_TREE;
5987
5988 /* This transformation is only worthwhile if we don't have to wrap
5989 arg in a SAVE_EXPR, and the operation can be simplified on at least
5990     one of the branches once it's pushed inside the COND_EXPR.  */
5991 if (!TREE_CONSTANT (arg))
5992 return NULL_TREE;
5993
5994 if (TREE_CODE (cond) == COND_EXPR)
5995 {
5996 test = TREE_OPERAND (cond, 0);
5997 true_value = TREE_OPERAND (cond, 1);
5998 false_value = TREE_OPERAND (cond, 2);
5999	 /* If this operand has void type (e.g., because it can throw an
6000	    exception), then it does not make sense to try to perform a
6001	    logical or arithmetic operation involving it.  */
6002 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6003 lhs = true_value;
6004 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6005 rhs = false_value;
6006 }
6007 else
6008 {
6009 tree testtype = TREE_TYPE (cond);
6010 test = cond;
6011 true_value = constant_boolean_node (true, testtype);
6012 false_value = constant_boolean_node (false, testtype);
6013 }
6014
6015 arg = fold_convert (arg_type, arg);
6016 if (lhs == 0)
6017 {
6018 true_value = fold_convert (cond_type, true_value);
6019 if (cond_first_p)
6020 lhs = fold_build2 (code, type, true_value, arg);
6021 else
6022 lhs = fold_build2 (code, type, arg, true_value);
6023 }
6024 if (rhs == 0)
6025 {
6026 false_value = fold_convert (cond_type, false_value);
6027 if (cond_first_p)
6028 rhs = fold_build2 (code, type, false_value, arg);
6029 else
6030 rhs = fold_build2 (code, type, arg, false_value);
6031 }
6032
6033 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6034 return fold_convert (type, test);
6035}
6036
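/* A standalone sketch of the transformation in the header comment
   above, checked for both values of the condition; hypothetical
   example values.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 7, x = 3, y = 9, b;
  for (b = 0; b <= 1; b++)
    assert (a + (b ? x : y) == (b ? a + x : a + y));
  return 0;
}
#endif
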
6037
6038/* Subroutine of fold() that checks for the addition of +/- 0.0.
6039
6040 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6041 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6042 ADDEND is the same as X.
6043
6044 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6045 and finite. The problematic cases are when X is zero, and its mode
6046 has signed zeros. In the case of rounding towards -infinity,
6047 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6048 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6049
6050static bool
6051fold_real_zero_addition_p (tree type, tree addend, int negate)
6052{
6053 if (!real_zerop (addend))
6054 return false;
6055
6056 /* Don't allow the fold with -fsignaling-nans. */
6057 if (HONOR_SNANS (TYPE_MODE (type)))
6058 return false;
6059
6060 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6061 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6062 return true;
6063
6064 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6065 if (TREE_CODE (addend) == REAL_CST
6066 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6067 negate = !negate;
6068
6069 /* The mode has signed zeros, and we have to honor their sign.
6070 In this situation, there is only one case we can return true for.
6071 X - 0 is the same as X unless rounding towards -infinity is
6072 supported. */
6073 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6074}
6075
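/* A standalone sketch of the signed-zero hazard described above,
   assuming IEEE doubles and the default round-to-nearest mode: X + 0.0
   is not X when X is -0.0, because -0.0 + 0.0 is +0.0.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));         /* X is minus zero ...            */
  assert (!signbit (x + 0.0));  /* ... but X + 0.0 is plus zero.  */
  return 0;
}
#endif
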
6076/* Subroutine of fold() that checks comparisons of built-in math
6077 functions against real constants.
6078
6079 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6080 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6081 is the type of the result and ARG0 and ARG1 are the operands of the
6082 comparison. ARG1 must be a TREE_REAL_CST.
6083
6084 The function returns the constant folded tree if a simplification
6085 can be made, and NULL_TREE otherwise. */
6086
6087static tree
6088fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6089 tree type, tree arg0, tree arg1)
6090{
6091 REAL_VALUE_TYPE c;
6092
6093 if (BUILTIN_SQRT_P (fcode))
6094 {
6095 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6096 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6097
6098 c = TREE_REAL_CST (arg1);
6099 if (REAL_VALUE_NEGATIVE (c))
6100 {
6101 /* sqrt(x) < y is always false, if y is negative. */
6102 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6103 return omit_one_operand (type, integer_zero_node, arg);
6104
6105 /* sqrt(x) > y is always true, if y is negative and we
6106 don't care about NaNs, i.e. negative values of x. */
6107 if (code == NE_EXPR || !HONOR_NANS (mode))
6108 return omit_one_operand (type, integer_one_node, arg);
6109
6110 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6111 return fold_build2 (GE_EXPR, type, arg,
6112 build_real (TREE_TYPE (arg), dconst0));
6113 }
6114 else if (code == GT_EXPR || code == GE_EXPR)
6115 {
6116 REAL_VALUE_TYPE c2;
6117
6118 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6119 real_convert (&c2, mode, &c2);
6120
6121 if (REAL_VALUE_ISINF (c2))
6122 {
6123 /* sqrt(x) > y is x == +Inf, when y is very large. */
6124 if (HONOR_INFINITIES (mode))
6125 return fold_build2 (EQ_EXPR, type, arg,
6126 build_real (TREE_TYPE (arg), c2));
6127
6128 /* sqrt(x) > y is always false, when y is very large
6129 and we don't care about infinities. */
6130 return omit_one_operand (type, integer_zero_node, arg);
6131 }
6132
6133 /* sqrt(x) > c is the same as x > c*c. */
6134 return fold_build2 (code, type, arg,
6135 build_real (TREE_TYPE (arg), c2));
6136 }
6137 else if (code == LT_EXPR || code == LE_EXPR)
6138 {
6139 REAL_VALUE_TYPE c2;
6140
6141 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6142 real_convert (&c2, mode, &c2);
6143
6144 if (REAL_VALUE_ISINF (c2))
6145 {
6146 /* sqrt(x) < y is always true, when y is a very large
6147 value and we don't care about NaNs or Infinities. */
6148 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6149 return omit_one_operand (type, integer_one_node, arg);
6150
6151 /* sqrt(x) < y is x != +Inf when y is very large and we
6152 don't care about NaNs. */
6153 if (! HONOR_NANS (mode))
6154 return fold_build2 (NE_EXPR, type, arg,
6155 build_real (TREE_TYPE (arg), c2));
6156
6157 /* sqrt(x) < y is x >= 0 when y is very large and we
6158 don't care about Infinities. */
6159 if (! HONOR_INFINITIES (mode))
6160 return fold_build2 (GE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg), dconst0));
6162
6163 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6164 if (lang_hooks.decls.global_bindings_p () != 0
6165 || CONTAINS_PLACEHOLDER_P (arg))
6166 return NULL_TREE;
6167
6168 arg = save_expr (arg);
6169 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6170 fold_build2 (GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 dconst0)),
6173 fold_build2 (NE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg),
6175 c2)));
6176 }
6177
6178 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6179 if (! HONOR_NANS (mode))
6180 return fold_build2 (code, type, arg,
6181 build_real (TREE_TYPE (arg), c2));
6182
6183 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6184 if (lang_hooks.decls.global_bindings_p () == 0
6185 && ! CONTAINS_PLACEHOLDER_P (arg))
6186 {
6187 arg = save_expr (arg);
6188 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6189 fold_build2 (GE_EXPR, type, arg,
6190 build_real (TREE_TYPE (arg),
6191 dconst0)),
6192 fold_build2 (code, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 c2)));
6195 }
6196 }
6197 }
6198
6199 return NULL_TREE;
6200}
6201
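/* A standalone sketch of the core rewrite above, sqrt(x) > c becoming
   x > c*c, sampled over nonnegative x with a constant whose square is
   exactly representable; NaN inputs are deliberately excluded, matching
   the !HONOR_NANS case.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0, x;
  for (x = 0.0; x <= 20.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
#endif
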
6202/* Subroutine of fold() that optimizes comparisons against Infinities,
6203 either +Inf or -Inf.
6204
6205 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6206 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6207 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6208
6209 The function returns the constant folded tree if a simplification
6210 can be made, and NULL_TREE otherwise. */
6211
6212static tree
6213fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6214{
6215 enum machine_mode mode;
6216 REAL_VALUE_TYPE max;
6217 tree temp;
6218 bool neg;
6219
6220 mode = TYPE_MODE (TREE_TYPE (arg0));
6221
6222 /* For negative infinity swap the sense of the comparison. */
6223 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6224 if (neg)
6225 code = swap_tree_comparison (code);
6226
6227 switch (code)
6228 {
6229 case GT_EXPR:
6230      /* x > +Inf is always false, if we ignore sNaNs.  */
6231 if (HONOR_SNANS (mode))
6232 return NULL_TREE;
6233 return omit_one_operand (type, integer_zero_node, arg0);
6234
6235 case LE_EXPR:
6236      /* x <= +Inf is always true, if we don't care about NaNs.  */
6237 if (! HONOR_NANS (mode))
6238 return omit_one_operand (type, integer_one_node, arg0);
6239
6240 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6241 if (lang_hooks.decls.global_bindings_p () == 0
6242 && ! CONTAINS_PLACEHOLDER_P (arg0))
6243 {
6244 arg0 = save_expr (arg0);
6245 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6246 }
6247 break;
6248
6249 case EQ_EXPR:
6250 case GE_EXPR:
6251 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255
6256 case LT_EXPR:
6257 /* x < +Inf is always equal to x <= DBL_MAX. */
6258 real_maxval (&max, neg, mode);
6259 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6261
6262 case NE_EXPR:
6263 /* x != +Inf is always equal to !(x > DBL_MAX). */
6264 real_maxval (&max, neg, mode);
6265 if (! HONOR_NANS (mode))
6266 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6268
6269 /* The transformation below creates non-gimple code and thus is
6270 not appropriate if we are in gimple form. */
6271 if (in_gimple_form)
6272 return NULL_TREE;
6273
6274 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6275 arg0, build_real (TREE_TYPE (arg0), max));
6276 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6277
6278 default:
6279 break;
6280 }
6281
6282 return NULL_TREE;
6283}
6284
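/* A standalone sketch of the LT_EXPR case above: for any non-NaN x,
   x < +Inf agrees with x <= DBL_MAX (assuming IEEE doubles and the C99
   INFINITY macro).  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double xs[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  int i;
  for (i = 0; i < 6; i++)
    assert ((xs[i] < INFINITY) == (xs[i] <= DBL_MAX));
  return 0;
}
#endif
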
6285/* Subroutine of fold() that optimizes comparisons of a division by
6286 a nonzero integer constant against an integer constant, i.e.
6287 X/C1 op C2.
6288
6289 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6290 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6291   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6292
6293 The function returns the constant folded tree if a simplification
6294 can be made, and NULL_TREE otherwise. */
6295
6296static tree
6297fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6298{
6299 tree prod, tmp, hi, lo;
6300 tree arg00 = TREE_OPERAND (arg0, 0);
6301 tree arg01 = TREE_OPERAND (arg0, 1);
6302 unsigned HOST_WIDE_INT lpart;
6303 HOST_WIDE_INT hpart;
6304 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6305 bool neg_overflow;
6306 int overflow;
6307
6308 /* We have to do this the hard way to detect unsigned overflow.
6309 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6310 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6311 TREE_INT_CST_HIGH (arg01),
6312 TREE_INT_CST_LOW (arg1),
6313 TREE_INT_CST_HIGH (arg1),
6314 &lpart, &hpart, unsigned_p);
6315 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6316 prod = force_fit_type (prod, -1, overflow, false);
6317 neg_overflow = false;
6318
6319 if (unsigned_p)
6320 {
6321 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6322 lo = prod;
6323
6324 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6325 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6326 TREE_INT_CST_HIGH (prod),
6327 TREE_INT_CST_LOW (tmp),
6328 TREE_INT_CST_HIGH (tmp),
6329 &lpart, &hpart, unsigned_p);
6330 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6331 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6332 TREE_CONSTANT_OVERFLOW (prod));
6333 }
6334 else if (tree_int_cst_sgn (arg01) >= 0)
6335 {
6336 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6337 switch (tree_int_cst_sgn (arg1))
6338 {
6339 case -1:
6340 neg_overflow = true;
6341 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6342 hi = prod;
6343 break;
6344
6345 case 0:
6346 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6347 hi = tmp;
6348 break;
6349
6350 case 1:
6351 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6352 lo = prod;
6353 break;
6354
6355 default:
6356 gcc_unreachable ();
6357 }
6358 }
6359 else
6360 {
6361 /* A negative divisor reverses the relational operators. */
6362 code = swap_tree_comparison (code);
6363
6364 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6365 switch (tree_int_cst_sgn (arg1))
6366 {
6367 case -1:
6368 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6369 lo = prod;
6370 break;
6371
6372 case 0:
6373 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6374 lo = tmp;
6375 break;
6376
6377 case 1:
6378 neg_overflow = true;
6379 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6380 hi = prod;
6381 break;
6382
6383 default:
6384 gcc_unreachable ();
6385 }
6386 }
6387
6388 switch (code)
6389 {
6390 case EQ_EXPR:
6391 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6392 return omit_one_operand (type, integer_zero_node, arg00);
6393 if (TREE_OVERFLOW (hi))
6394 return fold_build2 (GE_EXPR, type, arg00, lo);
6395 if (TREE_OVERFLOW (lo))
6396 return fold_build2 (LE_EXPR, type, arg00, hi);
6397 return build_range_check (type, arg00, 1, lo, hi);
6398
6399 case NE_EXPR:
6400 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401 return omit_one_operand (type, integer_one_node, arg00);
6402 if (TREE_OVERFLOW (hi))
6403 return fold_build2 (LT_EXPR, type, arg00, lo);
6404 if (TREE_OVERFLOW (lo))
6405 return fold_build2 (GT_EXPR, type, arg00, hi);
6406 return build_range_check (type, arg00, 0, lo, hi);
6407
6408 case LT_EXPR:
6409 if (TREE_OVERFLOW (lo))
6410 {
6411 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6412 return omit_one_operand (type, tmp, arg00);
6413 }
6414 return fold_build2 (LT_EXPR, type, arg00, lo);
6415
6416 case LE_EXPR:
6417 if (TREE_OVERFLOW (hi))
6418 {
6419 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6420 return omit_one_operand (type, tmp, arg00);
6421 }
6422 return fold_build2 (LE_EXPR, type, arg00, hi);
6423
6424 case GT_EXPR:
6425 if (TREE_OVERFLOW (hi))
6426 {
6427 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6428 return omit_one_operand (type, tmp, arg00);
6429 }
6430 return fold_build2 (GT_EXPR, type, arg00, hi);
6431
6432 case GE_EXPR:
6433 if (TREE_OVERFLOW (lo))
6434 {
6435 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6436 return omit_one_operand (type, tmp, arg00);
6437 }
6438 return fold_build2 (GE_EXPR, type, arg00, lo);
6439
6440 default:
6441 break;
6442 }
6443
6444 return NULL_TREE;
6445}
6446
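/* A standalone sketch of the range built above for the EQ_EXPR case
   with positive operands: lo = C1*C2 and hi = lo + (C1-1), so
   X/4 == 3 is exactly 12 <= X && X <= 15 for unsigned X.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned x;
  for (x = 0; x < 1000; x++)
    assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  return 0;
}
#endif
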
6447
6448/* If CODE with arguments ARG0 and ARG1 represents a single bit
6449 equality/inequality test, then return a simplified form of the test
6450   using a sign test.  Otherwise return NULL.  TYPE is the desired
6451 result type. */
6452
6453static tree
6454fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6455 tree result_type)
6456{
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461 {
6462 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6464 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465
6466 if (arg00 != NULL_TREE
6467 /* This is only a win if casting to a signed type is cheap,
6468 i.e. when arg00's type is not a partial mode. */
6469 && TYPE_PRECISION (TREE_TYPE (arg00))
6470 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6471 {
6472 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6473 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6474 result_type, fold_convert (stype, arg00),
6475 build_int_cst (stype, 0));
6476 }
6477 }
6478
6479 return NULL_TREE;
6480}
6481
6482/* If CODE with arguments ARG0 and ARG1 represents a single bit
6483 equality/inequality test, then return a simplified form of
6484 the test using shifts and logical operations. Otherwise return
6485 NULL. TYPE is the desired result type. */
6486
6487tree
6488fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6489 tree result_type)
6490{
6491 /* If this is testing a single bit, we can optimize the test. */
6492 if ((code == NE_EXPR || code == EQ_EXPR)
6493 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6494 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6495 {
6496 tree inner = TREE_OPERAND (arg0, 0);
6497 tree type = TREE_TYPE (arg0);
6498 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6499 enum machine_mode operand_mode = TYPE_MODE (type);
6500 int ops_unsigned;
6501 tree signed_type, unsigned_type, intermediate_type;
6502 tree tem;
6503
6504 /* First, see if we can fold the single bit test into a sign-bit
6505 test. */
6506 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6507 result_type);
6508 if (tem)
6509 return tem;
6510
6511 /* Otherwise we have (A & C) != 0 where C is a single bit,
6512 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6513 Similarly for (A & C) == 0. */
6514
6515 /* If INNER is a right shift of a constant and it plus BITNUM does
6516 not overflow, adjust BITNUM and INNER. */
6517 if (TREE_CODE (inner) == RSHIFT_EXPR
6518 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6519 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6520 && bitnum < TYPE_PRECISION (type)
6521 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6522 bitnum - TYPE_PRECISION (type)))
6523 {
6524 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6525 inner = TREE_OPERAND (inner, 0);
6526 }
6527
6528 /* If we are going to be able to omit the AND below, we must do our
6529 operations as unsigned. If we must use the AND, we have a choice.
6530 Normally unsigned is faster, but for some machines signed is. */
6531#ifdef LOAD_EXTEND_OP
6532 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6533 && !flag_syntax_only) ? 0 : 1;
6534#else
6535 ops_unsigned = 1;
6536#endif
6537
6538 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6539 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6540 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6541 inner = fold_convert (intermediate_type, inner);
6542
6543 if (bitnum != 0)
6544 inner = build2 (RSHIFT_EXPR, intermediate_type,
6545 inner, size_int (bitnum));
6546
6547 if (code == EQ_EXPR)
6548 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6549 inner, integer_one_node);
6550
6551 /* Put the AND last so it can combine with more things. */
6552 inner = build2 (BIT_AND_EXPR, intermediate_type,
6553 inner, integer_one_node);
6554
6555 /* Make sure to return the proper type. */
6556 inner = fold_convert (result_type, inner);
6557
6558 return inner;
6559 }
6560 return NULL_TREE;
6561}
6562
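/* A standalone sketch of the single-bit rewrite above: with C = 8
   (so C2 = log2(C) = 3), (A & C) != 0 becomes (A >> 3) & 1, and the
   EQ_EXPR form gains the XOR with one.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a;
  for (a = 0; a < 256; a++)
    {
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
    }
  return 0;
}
#endif
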
6563/* Check whether we are allowed to reorder operands arg0 and arg1,
6564 such that the evaluation of arg1 occurs before arg0. */
6565
6566static bool
6567reorder_operands_p (tree arg0, tree arg1)
6568{
6569 if (! flag_evaluation_order)
6570 return true;
6571 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6572 return true;
6573 return ! TREE_SIDE_EFFECTS (arg0)
6574 && ! TREE_SIDE_EFFECTS (arg1);
6575}
6576
6577/* Test whether it is preferable to swap two operands, ARG0 and
6578 ARG1, for example because ARG0 is an integer constant and ARG1
6579 isn't. If REORDER is true, only recommend swapping if we can
6580 evaluate the operands in reverse order. */
6581
6582bool
6583tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6584{
6585 STRIP_SIGN_NOPS (arg0);
6586 STRIP_SIGN_NOPS (arg1);
6587
6588 if (TREE_CODE (arg1) == INTEGER_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == INTEGER_CST)
6591 return 1;
6592
6593 if (TREE_CODE (arg1) == REAL_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == REAL_CST)
6596 return 1;
6597
6598 if (TREE_CODE (arg1) == COMPLEX_CST)
6599 return 0;
6600 if (TREE_CODE (arg0) == COMPLEX_CST)
6601 return 1;
6602
6603 if (TREE_CONSTANT (arg1))
6604 return 0;
6605 if (TREE_CONSTANT (arg0))
6606 return 1;
6607
6608 if (optimize_size)
6609 return 0;
6610
6611 if (reorder && flag_evaluation_order
6612 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6613 return 0;
6614
6615 if (DECL_P (arg1))
6616 return 0;
6617 if (DECL_P (arg0))
6618 return 1;
6619
6620  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6621 for commutative and comparison operators. Ensuring a canonical
6622 form allows the optimizers to find additional redundancies without
6623 having to explicitly check for both orderings. */
6624 if (TREE_CODE (arg0) == SSA_NAME
6625 && TREE_CODE (arg1) == SSA_NAME
6626 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6627 return 1;
6628
6629 return 0;
6630}
6631
6632/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6633 ARG0 is extended to a wider type. */
6634
6635static tree
6636fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6637{
6638 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6639 tree arg1_unw;
6640 tree shorter_type, outer_type;
6641 tree min, max;
6642 bool above, below;
6643
6644 if (arg0_unw == arg0)
6645 return NULL_TREE;
6646 shorter_type = TREE_TYPE (arg0_unw);
6647
6648#ifdef HAVE_canonicalize_funcptr_for_compare
6649 /* Disable this optimization if we're casting a function pointer
6650 type on targets that require function pointer canonicalization. */
6651 if (HAVE_canonicalize_funcptr_for_compare
6652 && TREE_CODE (shorter_type) == POINTER_TYPE
6653 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6654 return NULL_TREE;
6655#endif
6656
6657 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6658 return NULL_TREE;
6659
6660 arg1_unw = get_unwidened (arg1, shorter_type);
6661
6662 /* If possible, express the comparison in the shorter mode. */
6663 if ((code == EQ_EXPR || code == NE_EXPR
6664 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6665 && (TREE_TYPE (arg1_unw) == shorter_type
6666 || (TREE_CODE (arg1_unw) == INTEGER_CST
6667 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6668 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6669 && int_fits_type_p (arg1_unw, shorter_type))))
6670 return fold_build2 (code, type, arg0_unw,
6671 fold_convert (shorter_type, arg1_unw));
6672
6673 if (TREE_CODE (arg1_unw) != INTEGER_CST
6674 || TREE_CODE (shorter_type) != INTEGER_TYPE
6675 || !int_fits_type_p (arg1_unw, shorter_type))
6676 return NULL_TREE;
6677
6678  /* If we are comparing with an integer that does not fit into the range
6679 of the shorter type, the result is known. */
6680 outer_type = TREE_TYPE (arg1_unw);
6681 min = lower_bound_in_type (outer_type, shorter_type);
6682 max = upper_bound_in_type (outer_type, shorter_type);
6683
6684 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 max, arg1_unw));
6686 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6687 arg1_unw, min));
6688
6689 switch (code)
6690 {
6691 case EQ_EXPR:
6692 if (above || below)
6693 return omit_one_operand (type, integer_zero_node, arg0);
6694 break;
6695
6696 case NE_EXPR:
6697 if (above || below)
6698 return omit_one_operand (type, integer_one_node, arg0);
6699 break;
6700
6701 case LT_EXPR:
6702 case LE_EXPR:
6703 if (above)
6704 return omit_one_operand (type, integer_one_node, arg0);
6705 else if (below)
6706 return omit_one_operand (type, integer_zero_node, arg0);
6707
6708 case GT_EXPR:
6709 case GE_EXPR:
6710 if (above)
6711 return omit_one_operand (type, integer_zero_node, arg0);
6712 else if (below)
6713 return omit_one_operand (type, integer_one_node, arg0);
6714
6715 default:
6716 break;
6717 }
6718
6719 return NULL_TREE;
6720}
6721
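/* A standalone sketch of the out-of-range case above: 300 does not fit
   in signed char, so after widening, equality with it is known false
   and the ordering against it is known true for every value of the
   shorter type.  */
#if 0
#include <assert.h>

int
main (void)
{
  int big = 300;  /* does not fit in the shorter type */
  signed char c = -128;
  for (;;)
    {
      assert (!((int) c == big));
      assert ((int) c < big);
      if (c == 127)
        break;
      c++;
    }
  return 0;
}
#endif
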
6722/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6723 ARG0 just the signedness is changed. */
6724
6725static tree
6726fold_sign_changed_comparison (enum tree_code code, tree type,
6727 tree arg0, tree arg1)
6728{
6729 tree arg0_inner, tmp;
6730 tree inner_type, outer_type;
6731
6732 if (TREE_CODE (arg0) != NOP_EXPR
6733 && TREE_CODE (arg0) != CONVERT_EXPR)
6734 return NULL_TREE;
6735
6736 outer_type = TREE_TYPE (arg0);
6737 arg0_inner = TREE_OPERAND (arg0, 0);
6738 inner_type = TREE_TYPE (arg0_inner);
6739
6740#ifdef HAVE_canonicalize_funcptr_for_compare
6741 /* Disable this optimization if we're casting a function pointer
6742 type on targets that require function pointer canonicalization. */
6743 if (HAVE_canonicalize_funcptr_for_compare
6744 && TREE_CODE (inner_type) == POINTER_TYPE
6745 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6746 return NULL_TREE;
6747#endif
6748
6749 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6750 return NULL_TREE;
6751
6752 if (TREE_CODE (arg1) != INTEGER_CST
6753 && !((TREE_CODE (arg1) == NOP_EXPR
6754 || TREE_CODE (arg1) == CONVERT_EXPR)
6755 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6756 return NULL_TREE;
6757
6758 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6759 && code != NE_EXPR
6760 && code != EQ_EXPR)
6761 return NULL_TREE;
6762
6763 if (TREE_CODE (arg1) == INTEGER_CST)
6764 {
6765 tmp = build_int_cst_wide (inner_type,
6766 TREE_INT_CST_LOW (arg1),
6767 TREE_INT_CST_HIGH (arg1));
6768 arg1 = force_fit_type (tmp, 0,
6769 TREE_OVERFLOW (arg1),
6770 TREE_CONSTANT_OVERFLOW (arg1));
6771 }
6772 else
6773 arg1 = fold_convert (inner_type, arg1);
6774
6775 return fold_build2 (code, type, arg0_inner, arg1);
6776}
6777
6778/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6779   the step of the array.  Reconstructs s and delta in the case of s * delta
6780 being an integer constant (and thus already folded).
6781   ADDR is the address.  OP1 is the multiplicative expression.
6782 If the function succeeds, the new address expression is returned. Otherwise
6783 NULL_TREE is returned. */
6784
6785static tree
6786try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6787{
6788 tree s, delta, step;
6789 tree ref = TREE_OPERAND (addr, 0), pref;
6790 tree ret, pos;
6791 tree itype;
6792
6793 /* Canonicalize op1 into a possibly non-constant delta
6794 and an INTEGER_CST s. */
6795 if (TREE_CODE (op1) == MULT_EXPR)
6796 {
6797 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6798
6799 STRIP_NOPS (arg0);
6800 STRIP_NOPS (arg1);
6801
6802 if (TREE_CODE (arg0) == INTEGER_CST)
6803 {
6804 s = arg0;
6805 delta = arg1;
6806 }
6807 else if (TREE_CODE (arg1) == INTEGER_CST)
6808 {
6809 s = arg1;
6810 delta = arg0;
6811 }
6812 else
6813 return NULL_TREE;
6814 }
6815 else if (TREE_CODE (op1) == INTEGER_CST)
6816 {
6817 delta = op1;
6818 s = NULL_TREE;
6819 }
6820 else
6821 {
6822      /* Treat the expression as delta * 1.  */
6823 delta = op1;
6824 s = integer_one_node;
6825 }
6826
6827 for (;; ref = TREE_OPERAND (ref, 0))
6828 {
6829 if (TREE_CODE (ref) == ARRAY_REF)
6830 {
6831 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6832 if (! itype)
6833 continue;
6834
6835 step = array_ref_element_size (ref);
6836 if (TREE_CODE (step) != INTEGER_CST)
6837 continue;
6838
6839 if (s)
6840 {
6841 if (! tree_int_cst_equal (step, s))
6842 continue;
6843 }
6844 else
6845 {
6846	      /* Check whether delta is a multiple of step.  */
6847 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6848 if (! tmp)
6849 continue;
6850 delta = tmp;
6851 }
6852
6853 break;
6854 }
6855
6856 if (!handled_component_p (ref))
6857 return NULL_TREE;
6858 }
6859
6860  /* We have found a suitable array reference.  Copy everything up to it,
6861 and replace the index. */
6862
6863 pref = TREE_OPERAND (addr, 0);
6864 ret = copy_node (pref);
6865 pos = ret;
6866
6867 while (pref != ref)
6868 {
6869 pref = TREE_OPERAND (pref, 0);
6870 TREE_OPERAND (pos, 0) = copy_node (pref);
6871 pos = TREE_OPERAND (pos, 0);
6872 }
6873
6874 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6875 fold_convert (itype,
6876 TREE_OPERAND (pos, 1)),
6877 fold_convert (itype, delta));
6878
6879 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6880}
6881
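/* A standalone sketch of the index rewrite above, restated with C
   pointer arithmetic, which already scales by the element size:
   &a[idx] + delta is &a[idx + delta].  The folder performs the
   analogous rewrite when the byte offset is a multiple of the array
   step.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  int idx = 2, delta = 3;
  assert (&a[idx] + delta == &a[idx + delta]);
  return 0;
}
#endif
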
6882
6883/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6884 means A >= Y && A != MAX, but in this case we know that
6885 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6886
6887static tree
6888fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6889{
6890 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6891
6892 if (TREE_CODE (bound) == LT_EXPR)
6893 a = TREE_OPERAND (bound, 0);
6894 else if (TREE_CODE (bound) == GT_EXPR)
6895 a = TREE_OPERAND (bound, 1);
6896 else
6897 return NULL_TREE;
6898
6899 typea = TREE_TYPE (a);
6900 if (!INTEGRAL_TYPE_P (typea)
6901 && !POINTER_TYPE_P (typea))
6902 return NULL_TREE;
6903
6904 if (TREE_CODE (ineq) == LT_EXPR)
6905 {
6906 a1 = TREE_OPERAND (ineq, 1);
6907 y = TREE_OPERAND (ineq, 0);
6908 }
6909 else if (TREE_CODE (ineq) == GT_EXPR)
6910 {
6911 a1 = TREE_OPERAND (ineq, 0);
6912 y = TREE_OPERAND (ineq, 1);
6913 }
6914 else
6915 return NULL_TREE;
6916
6917 if (TREE_TYPE (a1) != typea)
6918 return NULL_TREE;
6919
6920 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6921 if (!integer_onep (diff))
6922 return NULL_TREE;
6923
6924 return fold_build2 (GE_EXPR, type, a, y);
6925}
6926
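/* A standalone sketch of the identity above over a small range: once
   A < X is known (so A cannot be the type maximum), A + 1 > Y and
   A >= Y agree.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, y, x = 50;
  for (a = -20; a < 40; a++)
    for (y = -20; y < 40; y++)
      assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
#endif
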
6927/* Fold a sum or difference in which at least one operand is a multiplication.
6928 Returns the folded tree or NULL if no simplification could be made. */
6929
6930static tree
6931fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6932{
6933 tree arg00, arg01, arg10, arg11;
6934 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6935
6936 /* (A * C) +- (B * C) -> (A+-B) * C.
6937 (A * C) +- A -> A * (C+-1).
6938 We are most concerned about the case where C is a constant,
6939 but other combinations show up during loop reduction. Since
6940 it is not difficult, try all four possibilities. */
6941
6942 if (TREE_CODE (arg0) == MULT_EXPR)
6943 {
6944 arg00 = TREE_OPERAND (arg0, 0);
6945 arg01 = TREE_OPERAND (arg0, 1);
6946 }
6947 else
6948 {
6949 arg00 = arg0;
6950 arg01 = build_one_cst (type);
6951 }
6952 if (TREE_CODE (arg1) == MULT_EXPR)
6953 {
6954 arg10 = TREE_OPERAND (arg1, 0);
6955 arg11 = TREE_OPERAND (arg1, 1);
6956 }
6957 else
6958 {
6959 arg10 = arg1;
6960 arg11 = build_one_cst (type);
6961 }
6962 same = NULL_TREE;
6963
6964 if (operand_equal_p (arg01, arg11, 0))
6965 same = arg01, alt0 = arg00, alt1 = arg10;
6966 else if (operand_equal_p (arg00, arg10, 0))
6967 same = arg00, alt0 = arg01, alt1 = arg11;
6968 else if (operand_equal_p (arg00, arg11, 0))
6969 same = arg00, alt0 = arg01, alt1 = arg10;
6970 else if (operand_equal_p (arg01, arg10, 0))
6971 same = arg01, alt0 = arg00, alt1 = arg11;
6972
6973 /* No identical multiplicands; see if we can find a common
6974 power-of-two factor in non-power-of-two multiplies. This
6975 can help in multi-dimensional array access. */
6976 else if (host_integerp (arg01, 0)
6977 && host_integerp (arg11, 0))
6978 {
6979 HOST_WIDE_INT int01, int11, tmp;
6980 bool swap = false;
6981 tree maybe_same;
6982 int01 = TREE_INT_CST_LOW (arg01);
6983 int11 = TREE_INT_CST_LOW (arg11);
6984
6985 /* Move min of absolute values to int11. */
6986 if ((int01 >= 0 ? int01 : -int01)
6987 < (int11 >= 0 ? int11 : -int11))
6988 {
6989 tmp = int01, int01 = int11, int11 = tmp;
6990 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6991 maybe_same = arg01;
6992 swap = true;
6993 }
6994 else
6995 maybe_same = arg11;
6996
6997 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6998 {
6999 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7000 build_int_cst (TREE_TYPE (arg00),
7001 int01 / int11));
7002 alt1 = arg10;
7003 same = maybe_same;
7004 if (swap)
7005 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7006 }
7007 }
7008
7009 if (same)
7010 return fold_build2 (MULT_EXPR, type,
7011 fold_build2 (code, type,
7012 fold_convert (type, alt0),
7013 fold_convert (type, alt1)),
7014 fold_convert (type, same));
7015
7016 return NULL_TREE;
7017}
7018
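/* A standalone sketch of the two rewrites named in the comment above,
   (A*C) + (B*C) -> (A+B)*C and (A*C) - A -> A*(C-1), over a small
   overflow-free range.  */
#if 0
#include <assert.h>

int
main (void)
{
  long a, b, c = 7;
  for (a = -10; a <= 10; a++)
    for (b = -10; b <= 10; b++)
      {
        assert (a * c + b * c == (a + b) * c);
        assert (a * c - a == a * (c - 1));
      }
  return 0;
}
#endif
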
7019/* Subroutine of native_encode_expr. Encode the INTEGER_CST
7020 specified by EXPR into the buffer PTR of length LEN bytes.
7021 Return the number of bytes placed in the buffer, or zero
7022 upon failure. */
7023
7024static int
7025native_encode_int (tree expr, unsigned char *ptr, int len)
7026{
7027 tree type = TREE_TYPE (expr);
7028 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 int byte, offset, word, words;
7030 unsigned char value;
7031
7032 if (total_bytes > len)
7033 return 0;
7034 words = total_bytes / UNITS_PER_WORD;
7035
7036 for (byte = 0; byte < total_bytes; byte++)
7037 {
7038 int bitpos = byte * BITS_PER_UNIT;
7039 if (bitpos < HOST_BITS_PER_WIDE_INT)
7040 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7041 else
7042 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7043 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7044
7045 if (total_bytes > UNITS_PER_WORD)
7046 {
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7055 }
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 ptr[offset] = value;
7059 }
7060 return total_bytes;
7061}
7062
7063
7064/* Subroutine of native_encode_expr. Encode the REAL_CST
7065 specified by EXPR into the buffer PTR of length LEN bytes.
7066 Return the number of bytes placed in the buffer, or zero
7067 upon failure. */
7068
7069static int
7070native_encode_real (tree expr, unsigned char *ptr, int len)
7071{
7072 tree type = TREE_TYPE (expr);
7073 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7074 int byte, offset, word, words, bitpos;
7075 unsigned char value;
7076
7077 /* There are always 32 bits in each long, no matter the size of
7078     the host's long. We handle floating point representations with
7079 up to 192 bits. */
7080 long tmp[6];
7081
7082 if (total_bytes > len)
7083 return 0;
7084  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7085
7086 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7087
7088 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7089 bitpos += BITS_PER_UNIT)
7090 {
7091 byte = (bitpos / BITS_PER_UNIT) & 3;
7092 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7093
7094 if (UNITS_PER_WORD < 4)
7095 {
7096 word = byte / UNITS_PER_WORD;
7097 if (WORDS_BIG_ENDIAN)
7098 word = (words - 1) - word;
7099 offset = word * UNITS_PER_WORD;
7100 if (BYTES_BIG_ENDIAN)
7101 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7102 else
7103 offset += byte % UNITS_PER_WORD;
7104 }
7105 else
7106 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7107 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7108 }
7109 return total_bytes;
7110}
7111
7112/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7113 specified by EXPR into the buffer PTR of length LEN bytes.
7114 Return the number of bytes placed in the buffer, or zero
7115 upon failure. */
7116
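/* The real part is encoded first and the imaginary part immediately
   after it; both parts must occupy the same number of bytes.  */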
7117static int
7118native_encode_complex (tree expr, unsigned char *ptr, int len)
7119{
7120 int rsize, isize;
7121 tree part;
7122
7123 part = TREE_REALPART (expr);
7124 rsize = native_encode_expr (part, ptr, len);
7125 if (rsize == 0)
7126 return 0;
7127 part = TREE_IMAGPART (expr);
7128 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7129 if (isize != rsize)
7130 return 0;
7131 return rsize + isize;
7132}
7133
7134
7135/* Subroutine of native_encode_expr. Encode the VECTOR_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7139
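/* Vector elements are encoded in order; elements missing from the
   constant's element list are encoded as zero bytes.  */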
7140static int
7141native_encode_vector (tree expr, unsigned char *ptr, int len)
7142{
7143 int i, size, offset, count;
7144 tree itype, elem, elements;
7145
7146 offset = 0;
7147 elements = TREE_VECTOR_CST_ELTS (expr);
7148 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7149 itype = TREE_TYPE (TREE_TYPE (expr));
7150 size = GET_MODE_SIZE (TYPE_MODE (itype));
7151 for (i = 0; i < count; i++)
7152 {
7153 if (elements)
7154 {
7155 elem = TREE_VALUE (elements);
7156 elements = TREE_CHAIN (elements);
7157 }
7158 else
7159 elem = NULL_TREE;
7160
7161 if (elem)
7162 {
7163 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7164 return 0;
7165 }
7166 else
7167 {
7168 if (offset + size > len)
7169 return 0;
7170 memset (ptr+offset, 0, size);
7171 }
7172 offset += size;
7173 }
7174 return offset;
7175}
7176
7177
7178/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7179 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7180 buffer PTR of length LEN bytes. Return the number of bytes
7181 placed in the buffer, or zero upon failure. */
7182
7183static int
7184native_encode_expr (tree expr, unsigned char *ptr, int len)
7185{
7186 switch (TREE_CODE (expr))
7187 {
7188 case INTEGER_CST:
7189 return native_encode_int (expr, ptr, len);
7190
7191 case REAL_CST:
7192 return native_encode_real (expr, ptr, len);
7193
7194 case COMPLEX_CST:
7195 return native_encode_complex (expr, ptr, len);
7196
7197 case VECTOR_CST:
7198 return native_encode_vector (expr, ptr, len);
7199
7200 default:
7201 return 0;
7202 }
7203}
7204
7205
7206/* Subroutine of native_interpret_expr. Interpret the contents of
7207 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7208 If the buffer cannot be interpreted, return NULL_TREE. */
7209
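/* This is the byte-for-byte inverse of native_encode_int.  */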
7210static tree
7211native_interpret_int (tree type, unsigned char *ptr, int len)
7212{
7213 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7214 int byte, offset, word, words;
7215 unsigned char value;
7216  unsigned HOST_WIDE_INT lo = 0;
7217 HOST_WIDE_INT hi = 0;
7218
7219 if (total_bytes > len)
7220 return NULL_TREE;
7221 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7222 return NULL_TREE;
7223 words = total_bytes / UNITS_PER_WORD;
7224
7225 for (byte = 0; byte < total_bytes; byte++)
7226 {
7227 int bitpos = byte * BITS_PER_UNIT;
7228 if (total_bytes > UNITS_PER_WORD)
7229 {
7230 word = byte / UNITS_PER_WORD;
7231 if (WORDS_BIG_ENDIAN)
7232 word = (words - 1) - word;
7233 offset = word * UNITS_PER_WORD;
7234 if (BYTES_BIG_ENDIAN)
7235 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7236 else
7237 offset += byte % UNITS_PER_WORD;
7238 }
7239 else
7240 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7241 value = ptr[offset];
7242
7243 if (bitpos < HOST_BITS_PER_WIDE_INT)
7244 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7245 else
7246 hi |= (unsigned HOST_WIDE_INT) value
7247 << (bitpos - HOST_BITS_PER_WIDE_INT);
7248 }
7249
7250 return force_fit_type (build_int_cst_wide (type, lo, hi),
7251 0, false, false);
7252}
7253
7254
7255/* Subroutine of native_interpret_expr. Interpret the contents of
7256 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7257 If the buffer cannot be interpreted, return NULL_TREE. */
7258
7259static tree
7260native_interpret_real (tree type, unsigned char *ptr, int len)
7261{
7262 enum machine_mode mode = TYPE_MODE (type);
7263 int total_bytes = GET_MODE_SIZE (mode);
7264 int byte, offset, word, words, bitpos;
7265 unsigned char value;
7266 /* There are always 32 bits in each long, no matter the size of
7267     the host's long. We handle floating point representations with
7268 up to 192 bits. */
7269 REAL_VALUE_TYPE r;
7270 long tmp[6];
7271
7273 if (total_bytes > len || total_bytes > 24)
7274 return NULL_TREE;
7275  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7276
7277 memset (tmp, 0, sizeof (tmp));
7278 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7279 bitpos += BITS_PER_UNIT)
7280 {
7281 byte = (bitpos / BITS_PER_UNIT) & 3;
7282 if (UNITS_PER_WORD < 4)
7283 {
7284 word = byte / UNITS_PER_WORD;
7285 if (WORDS_BIG_ENDIAN)
7286 word = (words - 1) - word;
7287 offset = word * UNITS_PER_WORD;
7288 if (BYTES_BIG_ENDIAN)
7289 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7290 else
7291 offset += byte % UNITS_PER_WORD;
7292 }
7293 else
7294 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7295 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7296
7297 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7298 }
7299
7300 real_from_target (&r, tmp, mode);
7301 return build_real (type, r);
7302}
7303
7304
7305/* Subroutine of native_interpret_expr. Interpret the contents of
7306 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7307 If the buffer cannot be interpreted, return NULL_TREE. */
7308
7309static tree
7310native_interpret_complex (tree type, unsigned char *ptr, int len)
7311{
7312 tree etype, rpart, ipart;
7313 int size;
7314
7315 etype = TREE_TYPE (type);
7316 size = GET_MODE_SIZE (TYPE_MODE (etype));
7317 if (size * 2 > len)
7318 return NULL_TREE;
7319 rpart = native_interpret_expr (etype, ptr, size);
7320 if (!rpart)
7321 return NULL_TREE;
7322 ipart = native_interpret_expr (etype, ptr+size, size);
7323 if (!ipart)
7324 return NULL_TREE;
7325 return build_complex (type, rpart, ipart);
7326}
7327
7328
7329/* Subroutine of native_interpret_expr. Interpret the contents of
7330 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7331 If the buffer cannot be interpreted, return NULL_TREE. */
7332
7333static tree
7334native_interpret_vector (tree type, unsigned char *ptr, int len)
7335{
7336 tree etype, elem, elements;
7337 int i, size, count;
7338
7339 etype = TREE_TYPE (type);
7340 size = GET_MODE_SIZE (TYPE_MODE (etype));
7341 count = TYPE_VECTOR_SUBPARTS (type);
7342 if (size * count > len)
7343 return NULL_TREE;
7344
7345 elements = NULL_TREE;
7346 for (i = count - 1; i >= 0; i--)
7347 {
7348 elem = native_interpret_expr (etype, ptr+(i*size), size);
7349 if (!elem)
7350 return NULL_TREE;
7351 elements = tree_cons (NULL_TREE, elem, elements);
7352 }
7353 return build_vector (type, elements);
7354}
7355
7356
7357/* Subroutine of fold_view_convert_expr. Interpret the contents of
7358 the buffer PTR of length LEN as a constant of type TYPE. For
7359 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7360 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7361 return NULL_TREE. */
7362
7363static tree
7364native_interpret_expr (tree type, unsigned char *ptr, int len)
7365{
7366 switch (TREE_CODE (type))
7367 {
7368 case INTEGER_TYPE:
7369 case ENUMERAL_TYPE:
7370 case BOOLEAN_TYPE:
7371 return native_interpret_int (type, ptr, len);
7372
7373 case REAL_TYPE:
7374 return native_interpret_real (type, ptr, len);
7375
7376 case COMPLEX_TYPE:
7377 return native_interpret_complex (type, ptr, len);
7378
7379 case VECTOR_TYPE:
7380 return native_interpret_vector (type, ptr, len);
7381
7382 default:
7383 return NULL_TREE;
7384 }
7385}
7386
7387
7388/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7389 TYPE at compile-time. If we're unable to perform the conversion
7390 return NULL_TREE. */
7391
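/* For example, on a target with IEEE-754 single precision and 32-bit
   int, this folds VIEW_CONVERT_EXPR<int>(1.0f) to the INTEGER_CST
   0x3f800000.  */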
7392static tree
7393fold_view_convert_expr (tree type, tree expr)
7394{
7395 /* We support up to 512-bit values (for V8DFmode). */
7396 unsigned char buffer[64];
7397 int len;
7398
7399 /* Check that the host and target are sane. */
7400 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7401 return NULL_TREE;
7402
7403 len = native_encode_expr (expr, buffer, sizeof (buffer));
7404 if (len == 0)
7405 return NULL_TREE;
7406
7407 return native_interpret_expr (type, buffer, len);
7408}
7409
7410
7411/* Fold a unary expression of code CODE and type TYPE with operand
7412 OP0. Return the folded expression if folding is successful.
7413 Otherwise, return NULL_TREE. */
7414
7415tree
7416fold_unary (enum tree_code code, tree type, tree op0)
7417{
7418 tree tem;
7419 tree arg0;
7420 enum tree_code_class kind = TREE_CODE_CLASS (code);
7421
7422 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7423 && TREE_CODE_LENGTH (code) == 1);
7424
7425 arg0 = op0;
7426 if (arg0)
7427 {
7428 if (code == NOP_EXPR || code == CONVERT_EXPR
7429 || code == FLOAT_EXPR || code == ABS_EXPR)
7430 {
7431 /* Don't use STRIP_NOPS, because signedness of argument type
7432 matters. */
7433 STRIP_SIGN_NOPS (arg0);
7434 }
7435 else
7436 {
7437 /* Strip any conversions that don't change the mode. This
7438 is safe for every expression, except for a comparison
7439 expression because its signedness is derived from its
7440 operands.
7441
7442 Note that this is done as an internal manipulation within
7443 the constant folder, in order to find the simplest
7444 representation of the arguments so that their form can be
7445	     studied.  In any case, the appropriate type conversions
7446 should be put back in the tree that will get out of the
7447 constant folder. */
7448 STRIP_NOPS (arg0);
7449 }
7450 }
7451
7452 if (TREE_CODE_CLASS (code) == tcc_unary)
7453 {
7454 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7455 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7456 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7457 else if (TREE_CODE (arg0) == COND_EXPR)
7458 {
7459 tree arg01 = TREE_OPERAND (arg0, 1);
7460 tree arg02 = TREE_OPERAND (arg0, 2);
7461 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7462 arg01 = fold_build1 (code, type, arg01);
7463 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7464 arg02 = fold_build1 (code, type, arg02);
7465 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7466 arg01, arg02);
7467
7468	  /* If this was a conversion, and all we did was move it
7469	     inside the COND_EXPR, bring it back out.  But leave it if
7470 it is a conversion from integer to integer and the
7471 result precision is no wider than a word since such a
7472 conversion is cheap and may be optimized away by combine,
7473 while it couldn't if it were outside the COND_EXPR. Then return
7474 so we don't get into an infinite recursion loop taking the
7475 conversion out and then back in. */
7476
7477 if ((code == NOP_EXPR || code == CONVERT_EXPR
7478 || code == NON_LVALUE_EXPR)
7479 && TREE_CODE (tem) == COND_EXPR
7480 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7481 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7482 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7483 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7484 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7485 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7486 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7487 && (INTEGRAL_TYPE_P
7488 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7489 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7490 || flag_syntax_only))
7491 tem = build1 (code, type,
7492 build3 (COND_EXPR,
7493 TREE_TYPE (TREE_OPERAND
7494 (TREE_OPERAND (tem, 1), 0)),
7495 TREE_OPERAND (tem, 0),
7496 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7497 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7498 return tem;
7499 }
7500 else if (COMPARISON_CLASS_P (arg0))
7501 {
7502 if (TREE_CODE (type) == BOOLEAN_TYPE)
7503 {
7504 arg0 = copy_node (arg0);
7505 TREE_TYPE (arg0) = type;
7506 return arg0;
7507 }
7508 else if (TREE_CODE (type) != INTEGER_TYPE)
7509 return fold_build3 (COND_EXPR, type, arg0,
7510 fold_build1 (code, type,
7511 integer_one_node),
7512 fold_build1 (code, type,
7513 integer_zero_node));
7514 }
7515 }
7516
7517 switch (code)
7518 {
7519 case NOP_EXPR:
7520 case FLOAT_EXPR:
7521 case CONVERT_EXPR:
7522 case FIX_TRUNC_EXPR:
7523 case FIX_CEIL_EXPR:
7524 case FIX_FLOOR_EXPR:
7525 case FIX_ROUND_EXPR:
7526 if (TREE_TYPE (op0) == type)
7527 return op0;
7528
7529 /* If we have (type) (a CMP b) and type is an integral type, return
7530	 a new expression involving the new type. */
7531 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7532 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7533 TREE_OPERAND (op0, 1));
7534
7535 /* Handle cases of two conversions in a row. */
7536 if (TREE_CODE (op0) == NOP_EXPR
7537 || TREE_CODE (op0) == CONVERT_EXPR)
7538 {
7539 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7540 tree inter_type = TREE_TYPE (op0);
7541 int inside_int = INTEGRAL_TYPE_P (inside_type);
7542 int inside_ptr = POINTER_TYPE_P (inside_type);
7543 int inside_float = FLOAT_TYPE_P (inside_type);
7544 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7545 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7546 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7547 int inter_int = INTEGRAL_TYPE_P (inter_type);
7548 int inter_ptr = POINTER_TYPE_P (inter_type);
7549 int inter_float = FLOAT_TYPE_P (inter_type);
7550 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7551 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7552 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7553 int final_int = INTEGRAL_TYPE_P (type);
7554 int final_ptr = POINTER_TYPE_P (type);
7555 int final_float = FLOAT_TYPE_P (type);
7556 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7557 unsigned int final_prec = TYPE_PRECISION (type);
7558 int final_unsignedp = TYPE_UNSIGNED (type);
7559
7560 /* In addition to the cases of two conversions in a row
7561 handled below, if we are converting something to its own
7562 type via an object of identical or wider precision, neither
7563 conversion is needed. */
7564 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7565 && (((inter_int || inter_ptr) && final_int)
7566 || (inter_float && final_float))
7567 && inter_prec >= final_prec)
7568 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7569
7570 /* Likewise, if the intermediate and final types are either both
7571 float or both integer, we don't need the middle conversion if
7572 it is wider than the final type and doesn't change the signedness
7573 (for integers). Avoid this if the final type is a pointer
7574 since then we sometimes need the inner conversion. Likewise if
7575 the outer has a precision not equal to the size of its mode. */
7576 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7577 || (inter_float && inside_float)
7578 || (inter_vec && inside_vec))
7579 && inter_prec >= inside_prec
7580 && (inter_float || inter_vec
7581 || inter_unsignedp == inside_unsignedp)
7582 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7583 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7584 && ! final_ptr
7585 && (! final_vec || inter_prec == inside_prec))
7586 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7587
7588 /* If we have a sign-extension of a zero-extended value, we can
7589 replace that by a single zero-extension. */
7590 if (inside_int && inter_int && final_int
7591 && inside_prec < inter_prec && inter_prec < final_prec
7592 && inside_unsignedp && !inter_unsignedp)
7593 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7594
7595 /* Two conversions in a row are not needed unless:
7596 - some conversion is floating-point (overstrict for now), or
7597 - some conversion is a vector (overstrict for now), or
7598 - the intermediate type is narrower than both initial and
7599 final, or
7600 - the intermediate type and innermost type differ in signedness,
7601 and the outermost type is wider than the intermediate, or
7602 - the initial type is a pointer type and the precisions of the
7603 intermediate and final types differ, or
7604 - the final type is a pointer type and the precisions of the
7605	   initial and intermediate types differ, or
7606	 - the final type is a pointer type and the initial type is not, or
7607	 - the initial type is a pointer to an array and the final type
7608	   is not. */
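	 /* For instance, (char)(int)c with c of type char reduces to c
	    itself, while in (int)(char)i with i of type int the
	    narrowing to char must be kept.  */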
7609 /* Java pointer type conversions generate checks in some
7610 cases, so we explicitly disallow this optimization. */
7611 if (! inside_float && ! inter_float && ! final_float
7612 && ! inside_vec && ! inter_vec && ! final_vec
7613 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7614 && ! (inside_int && inter_int
7615 && inter_unsignedp != inside_unsignedp
7616 && inter_prec < final_prec)
7617 && ((inter_unsignedp && inter_prec > inside_prec)
7618 == (final_unsignedp && final_prec > inter_prec))
7619 && ! (inside_ptr && inter_prec != final_prec)
7620 && ! (final_ptr && inside_prec != inter_prec)
7621 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7622 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7623 && final_ptr == inside_ptr
7624 && ! (inside_ptr
7625 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7626 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7627 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7628 && final_ptr))
7629 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7630 }
7631
7632 /* Handle (T *)&A.B.C for A being of type T and B and C
7633 living at offset zero. This occurs frequently in
7634 C++ upcasting and then accessing the base. */
7635 if (TREE_CODE (op0) == ADDR_EXPR
7636 && POINTER_TYPE_P (type)
7637 && handled_component_p (TREE_OPERAND (op0, 0)))
7638 {
7639 HOST_WIDE_INT bitsize, bitpos;
7640 tree offset;
7641 enum machine_mode mode;
7642 int unsignedp, volatilep;
7643 tree base = TREE_OPERAND (op0, 0);
7644 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7645 &mode, &unsignedp, &volatilep, false);
7646 /* If the reference was to a (constant) zero offset, we can use
7647 the address of the base if it has the same base type
7648 as the result type. */
7649 if (! offset && bitpos == 0
7650 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7651 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7652 return fold_convert (type, build_fold_addr_expr (base));
7653 }
7654
7655 if (TREE_CODE (op0) == MODIFY_EXPR
7656 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7657 /* Detect assigning a bitfield. */
7658 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7659 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7660 {
7661 /* Don't leave an assignment inside a conversion
7662 unless assigning a bitfield. */
7663 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7664 /* First do the assignment, then return converted constant. */
7665 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7666 TREE_NO_WARNING (tem) = 1;
7667 TREE_USED (tem) = 1;
7668 return tem;
7669 }
7670
7671 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7672	 constant (if x has signed type, the sign bit cannot be set
7673 in c). This folds extension into the BIT_AND_EXPR. */
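      /* For example, (unsigned long)(x & 0xff) with x of unsigned type
	 becomes (unsigned long) x & 0xfful.  */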
7674 if (INTEGRAL_TYPE_P (type)
7675 && TREE_CODE (type) != BOOLEAN_TYPE
7676 && TREE_CODE (op0) == BIT_AND_EXPR
7677 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7678 {
7679 tree and = op0;
7680 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7681 int change = 0;
7682
7683 if (TYPE_UNSIGNED (TREE_TYPE (and))
7684 || (TYPE_PRECISION (type)
7685 <= TYPE_PRECISION (TREE_TYPE (and))))
7686 change = 1;
7687 else if (TYPE_PRECISION (TREE_TYPE (and1))
7688 <= HOST_BITS_PER_WIDE_INT
7689 && host_integerp (and1, 1))
7690 {
7691 unsigned HOST_WIDE_INT cst;
7692
7693 cst = tree_low_cst (and1, 1);
7694 cst &= (HOST_WIDE_INT) -1
7695 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7696 change = (cst == 0);
7697#ifdef LOAD_EXTEND_OP
7698 if (change
7699 && !flag_syntax_only
7700 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7701 == ZERO_EXTEND))
7702 {
7703 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7704 and0 = fold_convert (uns, and0);
7705 and1 = fold_convert (uns, and1);
7706 }
7707#endif
7708 }
7709 if (change)
7710 {
7711 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7712 TREE_INT_CST_HIGH (and1));
7713 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7714 TREE_CONSTANT_OVERFLOW (and1));
7715 return fold_build2 (BIT_AND_EXPR, type,
7716 fold_convert (type, and0), tem);
7717 }
7718 }
7719
7720 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7721 T2 being pointers to types of the same size. */
7722 if (POINTER_TYPE_P (type)
7723 && BINARY_CLASS_P (arg0)
7724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7725 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7726 {
7727 tree arg00 = TREE_OPERAND (arg0, 0);
7728 tree t0 = type;
7729 tree t1 = TREE_TYPE (arg00);
7730 tree tt0 = TREE_TYPE (t0);
7731 tree tt1 = TREE_TYPE (t1);
7732 tree s0 = TYPE_SIZE (tt0);
7733 tree s1 = TYPE_SIZE (tt1);
7734
7735 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7736 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7737 TREE_OPERAND (arg0, 1));
7738 }
7739
7740 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7741	 of the same precision, and X is an integer type not narrower than
7742 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7743 if (INTEGRAL_TYPE_P (type)
7744 && TREE_CODE (op0) == BIT_NOT_EXPR
7745 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7746 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7747 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7748 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7749 {
7750 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7751 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7752 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7753 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7754 }
7755
7756 tem = fold_convert_const (code, type, op0);
7757 return tem ? tem : NULL_TREE;
7758
7759 case VIEW_CONVERT_EXPR:
7760 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7761 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7762 return fold_view_convert_expr (type, op0);
7763
7764 case NEGATE_EXPR:
7765 tem = fold_negate_expr (arg0);
7766 if (tem)
7767 return fold_convert (type, tem);
7768 return NULL_TREE;
7769
7770 case ABS_EXPR:
7771 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7772 return fold_abs_const (arg0, type);
7773 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7774 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7775 /* Convert fabs((double)float) into (double)fabsf(float). */
7776 else if (TREE_CODE (arg0) == NOP_EXPR
7777 && TREE_CODE (type) == REAL_TYPE)
7778 {
7779 tree targ0 = strip_float_extensions (arg0);
7780 if (targ0 != arg0)
7781 return fold_convert (type, fold_build1 (ABS_EXPR,
7782 TREE_TYPE (targ0),
7783 targ0));
7784 }
7785 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7786 else if (TREE_CODE (arg0) == ABS_EXPR)
7787 return arg0;
7788 else if (tree_expr_nonnegative_p (arg0))
7789 return arg0;
7790
7791 /* Strip sign ops from argument. */
7792 if (TREE_CODE (type) == REAL_TYPE)
7793 {
7794 tem = fold_strip_sign_ops (arg0);
7795 if (tem)
7796 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7797 }
7798 return NULL_TREE;
7799
7800 case CONJ_EXPR:
7801 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7802 return fold_convert (type, arg0);
7803 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7804 {
7805 tree itype = TREE_TYPE (type);
7806 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7807 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7808 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7809 }
7810 if (TREE_CODE (arg0) == COMPLEX_CST)
7811 {
7812 tree itype = TREE_TYPE (type);
7813 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7814 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7815 return build_complex (type, rpart, negate_expr (ipart));
7816 }
7817 if (TREE_CODE (arg0) == CONJ_EXPR)
7818 return fold_convert (type, TREE_OPERAND (arg0, 0));
7819 return NULL_TREE;
7820
7821 case BIT_NOT_EXPR:
7822 if (TREE_CODE (arg0) == INTEGER_CST)
7823 return fold_not_const (arg0, type);
7824 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7825 return TREE_OPERAND (arg0, 0);
7826 /* Convert ~ (-A) to A - 1. */
7827 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7828 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7829 build_int_cst (type, 1));
7830 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7831 else if (INTEGRAL_TYPE_P (type)
7832 && ((TREE_CODE (arg0) == MINUS_EXPR
7833 && integer_onep (TREE_OPERAND (arg0, 1)))
7834 || (TREE_CODE (arg0) == PLUS_EXPR
7835 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7836 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7837 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7838 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7839 && (tem = fold_unary (BIT_NOT_EXPR, type,
7840 fold_convert (type,
7841 TREE_OPERAND (arg0, 0)))))
7842 return fold_build2 (BIT_XOR_EXPR, type, tem,
7843 fold_convert (type, TREE_OPERAND (arg0, 1)));
7844 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7845 && (tem = fold_unary (BIT_NOT_EXPR, type,
7846 fold_convert (type,
7847 TREE_OPERAND (arg0, 1)))))
7848 return fold_build2 (BIT_XOR_EXPR, type,
7849 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7850
7851 return NULL_TREE;
7852
7853 case TRUTH_NOT_EXPR:
7854 /* The argument to invert_truthvalue must have Boolean type. */
7855 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7856 arg0 = fold_convert (boolean_type_node, arg0);
7857
7858 /* Note that the operand of this must be an int
7859 and its values must be 0 or 1.
7860 ("true" is a fixed value perhaps depending on the language,
7861 but we don't handle values other than 1 correctly yet.) */
7862 tem = fold_truth_not_expr (arg0);
7863 if (!tem)
7864 return NULL_TREE;
7865 return fold_convert (type, tem);
7866
7867 case REALPART_EXPR:
7868 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7869 return fold_convert (type, arg0);
7870 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7871 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7872 TREE_OPERAND (arg0, 1));
7873 if (TREE_CODE (arg0) == COMPLEX_CST)
7874 return fold_convert (type, TREE_REALPART (arg0));
7875 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7876 {
7877 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7878 tem = fold_build2 (TREE_CODE (arg0), itype,
7879 fold_build1 (REALPART_EXPR, itype,
7880 TREE_OPERAND (arg0, 0)),
7881 fold_build1 (REALPART_EXPR, itype,
7882 TREE_OPERAND (arg0, 1)));
7883 return fold_convert (type, tem);
7884 }
7885 if (TREE_CODE (arg0) == CONJ_EXPR)
7886 {
7887 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7888 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7889 return fold_convert (type, tem);
7890 }
7891 return NULL_TREE;
7892
7893 case IMAGPART_EXPR:
7894 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7895 return fold_convert (type, integer_zero_node);
7896 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7897 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7898 TREE_OPERAND (arg0, 0));
7899 if (TREE_CODE (arg0) == COMPLEX_CST)
7900 return fold_convert (type, TREE_IMAGPART (arg0));
7901 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7902 {
7903 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7904 tem = fold_build2 (TREE_CODE (arg0), itype,
7905 fold_build1 (IMAGPART_EXPR, itype,
7906 TREE_OPERAND (arg0, 0)),
7907 fold_build1 (IMAGPART_EXPR, itype,
7908 TREE_OPERAND (arg0, 1)));
7909 return fold_convert (type, tem);
7910 }
7911 if (TREE_CODE (arg0) == CONJ_EXPR)
7912 {
7913 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7914 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7915 return fold_convert (type, negate_expr (tem));
7916 }
7917 return NULL_TREE;
7918
7919 default:
7920 return NULL_TREE;
7921 } /* switch (code) */
7922}
7923
7924/* Fold a binary expression of code CODE and type TYPE with operands
7925 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7926 Return the folded expression if folding is successful. Otherwise,
7927 return NULL_TREE. */
7928
7929static tree
7930fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7931{
7932 enum tree_code compl_code;
7933
7934 if (code == MIN_EXPR)
7935 compl_code = MAX_EXPR;
7936 else if (code == MAX_EXPR)
7937 compl_code = MIN_EXPR;
7938 else
7939 gcc_unreachable ();
7940
7941 /* MIN (MAX (a, b), b) == b. */
7942 if (TREE_CODE (op0) == compl_code
7943 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7944 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7945
7946 /* MIN (MAX (b, a), b) == b. */
7947 if (TREE_CODE (op0) == compl_code
7948 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7949 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7950 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7951
7952 /* MIN (a, MAX (a, b)) == a. */
7953 if (TREE_CODE (op1) == compl_code
7954 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7955 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7956 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7957
7958 /* MIN (a, MAX (b, a)) == a. */
7959 if (TREE_CODE (op1) == compl_code
7960 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7961 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7962 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7963
7964 return NULL_TREE;
7965}
7966
7967/* Subroutine of fold_binary. This routine performs all of the
7968 transformations that are common to the equality/inequality
7969 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7970 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7971   fold_binary itself should use fold_binary instead.  Fold a comparison with
7972 tree code CODE and type TYPE with operands OP0 and OP1. Return
7973 the folded comparison or NULL_TREE. */
7974
7975static tree
7976fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7977{
7978 tree arg0, arg1, tem;
7979
7980 arg0 = op0;
7981 arg1 = op1;
7982
7983 STRIP_SIGN_NOPS (arg0);
7984 STRIP_SIGN_NOPS (arg1);
7985
7986 tem = fold_relational_const (code, type, arg0, arg1);
7987 if (tem != NULL_TREE)
7988 return tem;
7989
7990 /* If one arg is a real or integer constant, put it last. */
7991 if (tree_swap_operands_p (arg0, arg1, true))
7992 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7993
7994 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
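  /* For example, with signed X where overflow is undefined,
     X + 1 < 5 becomes X < 4.  */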
7995 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7996 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7997 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7999 && (TREE_CODE (arg1) == INTEGER_CST
8000 && !TREE_OVERFLOW (arg1)))
8001 {
8002 tree const1 = TREE_OPERAND (arg0, 1);
8003 tree const2 = arg1;
8004 tree variable = TREE_OPERAND (arg0, 0);
8005 tree lhs;
8006 int lhs_add;
8007 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8008
8009 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8010 TREE_TYPE (arg1), const2, const1);
8011 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8012 && (TREE_CODE (lhs) != INTEGER_CST
8013 || !TREE_OVERFLOW (lhs)))
8014 {
8015 fold_overflow_warning (("assuming signed overflow does not occur "
8016 "when changing X +- C1 cmp C2 to "
8017 "X cmp C1 +- C2"),
8018 WARN_STRICT_OVERFLOW_COMPARISON);
8019 return fold_build2 (code, type, variable, lhs);
8020 }
8021 }
8022
8023 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8024 same object, then we can fold this to a comparison of the two offsets in
8025 signed size type. This is possible because pointer arithmetic is
8026     restricted to remain within an object and overflow on pointer
8027 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8028
8029     We check flag_wrapv directly because pointer types are unsigned,
8030     and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
8031     normally what we want, in order to avoid certain odd overflow cases,
8032     but not here. */
8033 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8034 && !flag_wrapv
8035 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8036 {
8037 tree base0, offset0, base1, offset1;
8038
8039 if (extract_array_ref (arg0, &base0, &offset0)
8040 && extract_array_ref (arg1, &base1, &offset1)
8041 && operand_equal_p (base0, base1, 0))
8042 {
8043 tree signed_size_type_node;
8044 signed_size_type_node = signed_type_for (size_type_node);
8045
8046	  /* By converting to signed size type we cover middle-end pointer
8047	     arithmetic, which operates on unsigned pointer types as wide as
8048	     size type, and ARRAY_REF offsets, which are properly sign- or
8049	     zero-extended from their type in case it is narrower than
8050	     size type. */
8051 if (offset0 == NULL_TREE)
8052 offset0 = build_int_cst (signed_size_type_node, 0);
8053 else
8054 offset0 = fold_convert (signed_size_type_node, offset0);
8055 if (offset1 == NULL_TREE)
8056 offset1 = build_int_cst (signed_size_type_node, 0);
8057 else
8058 offset1 = fold_convert (signed_size_type_node, offset1);
8059
8060 return fold_build2 (code, type, offset0, offset1);
8061 }
8062 }
8063
8064 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8065 {
8066 tree targ0 = strip_float_extensions (arg0);
8067 tree targ1 = strip_float_extensions (arg1);
8068 tree newtype = TREE_TYPE (targ0);
8069
8070 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8071 newtype = TREE_TYPE (targ1);
8072
8073 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8074 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8075 return fold_build2 (code, type, fold_convert (newtype, targ0),
8076 fold_convert (newtype, targ1));
8077
8078 /* (-a) CMP (-b) -> b CMP a */
8079 if (TREE_CODE (arg0) == NEGATE_EXPR
8080 && TREE_CODE (arg1) == NEGATE_EXPR)
8081 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8082 TREE_OPERAND (arg0, 0));
8083
8084 if (TREE_CODE (arg1) == REAL_CST)
8085 {
8086 REAL_VALUE_TYPE cst;
8087 cst = TREE_REAL_CST (arg1);
8088
8089 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8090 if (TREE_CODE (arg0) == NEGATE_EXPR)
8091 return fold_build2 (swap_tree_comparison (code), type,
8092 TREE_OPERAND (arg0, 0),
8093 build_real (TREE_TYPE (arg1),
8094 REAL_VALUE_NEGATE (cst)));
8095
8096 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8097 /* a CMP (-0) -> a CMP 0 */
8098 if (REAL_VALUE_MINUS_ZERO (cst))
8099 return fold_build2 (code, type, arg0,
8100 build_real (TREE_TYPE (arg1), dconst0));
8101
8102 /* x != NaN is always true, other ops are always false. */
8103 if (REAL_VALUE_ISNAN (cst)
8104 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8105 {
8106 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8107 return omit_one_operand (type, tem, arg0);
8108 }
8109
8110 /* Fold comparisons against infinity. */
8111 if (REAL_VALUE_ISINF (cst))
8112 {
8113 tem = fold_inf_compare (code, type, arg0, arg1);
8114 if (tem != NULL_TREE)
8115 return tem;
8116 }
8117 }
8118
8119 /* If this is a comparison of a real constant with a PLUS_EXPR
8120 or a MINUS_EXPR of a real constant, we can convert it into a
8121 comparison with a revised real constant as long as no overflow
8122 occurs when unsafe_math_optimizations are enabled. */
8123 if (flag_unsafe_math_optimizations
8124 && TREE_CODE (arg1) == REAL_CST
8125 && (TREE_CODE (arg0) == PLUS_EXPR
8126 || TREE_CODE (arg0) == MINUS_EXPR)
8127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8128 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8129 ? MINUS_EXPR : PLUS_EXPR,
8130 arg1, TREE_OPERAND (arg0, 1), 0))
8131 && ! TREE_CONSTANT_OVERFLOW (tem))
8132 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8133
8134 /* Likewise, we can simplify a comparison of a real constant with
8135 a MINUS_EXPR whose first operand is also a real constant, i.e.
8136 (c1 - x) < c2 becomes x > c1-c2. */
8137 if (flag_unsafe_math_optimizations
8138 && TREE_CODE (arg1) == REAL_CST
8139 && TREE_CODE (arg0) == MINUS_EXPR
8140 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8141 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8142 arg1, 0))
8143 && ! TREE_CONSTANT_OVERFLOW (tem))
8144 return fold_build2 (swap_tree_comparison (code), type,
8145 TREE_OPERAND (arg0, 1), tem);
8146
8147 /* Fold comparisons against built-in math functions. */
8148 if (TREE_CODE (arg1) == REAL_CST
8149 && flag_unsafe_math_optimizations
8150 && ! flag_errno_math)
8151 {
8152 enum built_in_function fcode = builtin_mathfn_code (arg0);
8153
8154 if (fcode != END_BUILTINS)
8155 {
8156 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8157 if (tem != NULL_TREE)
8158 return tem;
8159 }
8160 }
8161 }
8162
8163 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8164 if (TREE_CONSTANT (arg1)
8165 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8166 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8167 /* This optimization is invalid for ordered comparisons
8168 if CONST+INCR overflows or if foo+incr might overflow.
8169 This optimization is invalid for floating point due to rounding.
8170 For pointer types we assume overflow doesn't happen. */
8171 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8172 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8173 && (code == EQ_EXPR || code == NE_EXPR))))
8174 {
8175 tree varop, newconst;
8176
8177 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8178 {
8179 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8180 arg1, TREE_OPERAND (arg0, 1));
8181 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8182 TREE_OPERAND (arg0, 0),
8183 TREE_OPERAND (arg0, 1));
8184 }
8185 else
8186 {
8187 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8188 arg1, TREE_OPERAND (arg0, 1));
8189 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8190 TREE_OPERAND (arg0, 0),
8191 TREE_OPERAND (arg0, 1));
8192 }
8193
8194
8195 /* If VAROP is a reference to a bitfield, we must mask
8196 the constant by the width of the field. */
8197 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8198 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8199 && host_integerp (DECL_SIZE (TREE_OPERAND
8200 (TREE_OPERAND (varop, 0), 1)), 1))
8201 {
8202 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8203 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8204 tree folded_compare, shift;
8205
8206 /* First check whether the comparison would come out
8207 always the same. If we don't do that we would
8208 change the meaning with the masking. */
8209 folded_compare = fold_build2 (code, type,
8210 TREE_OPERAND (varop, 0), arg1);
8211 if (TREE_CODE (folded_compare) == INTEGER_CST)
8212 return omit_one_operand (type, folded_compare, varop);
8213
8214 shift = build_int_cst (NULL_TREE,
8215 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8216 shift = fold_convert (TREE_TYPE (varop), shift);
8217 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8218 newconst, shift);
8219 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8220 newconst, shift);
8221 }
8222
8223 return fold_build2 (code, type, varop, newconst);
8224 }
8225
8226 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8227 && (TREE_CODE (arg0) == NOP_EXPR
8228 || TREE_CODE (arg0) == CONVERT_EXPR))
8229 {
8230 /* If we are widening one operand of an integer comparison,
8231 see if the other operand is similarly being widened. Perhaps we
8232 can do the comparison in the narrower type. */
8233 tem = fold_widened_comparison (code, type, arg0, arg1);
8234 if (tem)
8235 return tem;
8236
8237 /* Or if we are changing signedness. */
8238 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8239 if (tem)
8240 return tem;
8241 }
8242
8243 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8244 constant, we can simplify it. */
8245 if (TREE_CODE (arg1) == INTEGER_CST
8246 && (TREE_CODE (arg0) == MIN_EXPR
8247 || TREE_CODE (arg0) == MAX_EXPR)
8248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8249 {
8250 tem = optimize_minmax_comparison (code, type, op0, op1);
8251 if (tem)
8252 return tem;
8253 }
8254
8255 /* Simplify comparison of something with itself. (For IEEE
8256 floating-point, we can only do some of these simplifications.) */
8257 if (operand_equal_p (arg0, arg1, 0))
8258 {
8259 switch (code)
8260 {
8261 case EQ_EXPR:
8262 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8263 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8264 return constant_boolean_node (1, type);
8265 break;
8266
8267 case GE_EXPR:
8268 case LE_EXPR:
8269 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8270 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8271 return constant_boolean_node (1, type);
8272 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8273
8274 case NE_EXPR:
8275 /* For NE, we can only do this simplification if integer
8276 or we don't honor IEEE floating point NaNs. */
8277 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8278 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8279 break;
8280 /* ... fall through ... */
8281 case GT_EXPR:
8282 case LT_EXPR:
8283 return constant_boolean_node (0, type);
8284 default:
8285 gcc_unreachable ();
8286 }
8287 }
8288
8289 /* If we are comparing an expression that just has comparisons
8290 of two integer values, arithmetic expressions of those comparisons,
8291 and constants, we can simplify it. There are only three cases
8292 to check: the two values can either be equal, the first can be
8293 greater, or the second can be greater. Fold the expression for
8294 those three values. Since each value must be 0 or 1, we have
8295 eight possibilities, each of which corresponds to the constant 0
8296 or 1 or one of the six possible comparisons.
8297
8298 This handles common cases like (a > b) == 0 but also handles
8299 expressions like ((x > y) - (y > x)) > 0, which supposedly
8300 occur in macroized code. */
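  /* For example, (a > b) == 0 folds to a <= b by this method.  */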
8301
8302 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8303 {
8304 tree cval1 = 0, cval2 = 0;
8305 int save_p = 0;
8306
8307 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8308 /* Don't handle degenerate cases here; they should already
8309 have been handled anyway. */
8310 && cval1 != 0 && cval2 != 0
8311 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8312 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8313 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8314 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8315 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8316 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8317 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8318 {
8319 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8320 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8321
8322 /* We can't just pass T to eval_subst in case cval1 or cval2
8323 was the same as ARG1. */
8324
8325 tree high_result
8326 = fold_build2 (code, type,
8327 eval_subst (arg0, cval1, maxval,
8328 cval2, minval),
8329 arg1);
8330 tree equal_result
8331 = fold_build2 (code, type,
8332 eval_subst (arg0, cval1, maxval,
8333 cval2, maxval),
8334 arg1);
8335 tree low_result
8336 = fold_build2 (code, type,
8337 eval_subst (arg0, cval1, minval,
8338 cval2, maxval),
8339 arg1);
8340
8341 /* All three of these results should be 0 or 1. Confirm they are.
8342 Then use those values to select the proper code to use. */
8343
8344 if (TREE_CODE (high_result) == INTEGER_CST
8345 && TREE_CODE (equal_result) == INTEGER_CST
8346 && TREE_CODE (low_result) == INTEGER_CST)
8347 {
8348 /* Make a 3-bit mask with the high-order bit being the
8349 value for `>', the next for '=', and the low for '<'. */
8350 switch ((integer_onep (high_result) * 4)
8351 + (integer_onep (equal_result) * 2)
8352 + integer_onep (low_result))
8353 {
8354 case 0:
8355 /* Always false. */
8356 return omit_one_operand (type, integer_zero_node, arg0);
8357 case 1:
8358 code = LT_EXPR;
8359 break;
8360 case 2:
8361 code = EQ_EXPR;
8362 break;
8363 case 3:
8364 code = LE_EXPR;
8365 break;
8366 case 4:
8367 code = GT_EXPR;
8368 break;
8369 case 5:
8370 code = NE_EXPR;
8371 break;
8372 case 6:
8373 code = GE_EXPR;
8374 break;
8375 case 7:
8376 /* Always true. */
8377 return omit_one_operand (type, integer_one_node, arg0);
8378 }
8379
8380 if (save_p)
8381 return save_expr (build2 (code, type, cval1, cval2));
8382 return fold_build2 (code, type, cval1, cval2);
8383 }
8384 }
8385 }
8386
8387 /* Fold a comparison of the address of COMPONENT_REFs with the same
8388 type and component to a comparison of the address of the base
8389   object.  In short, fold &x->a OP &y->a to x OP y and
8390   &x->a OP &y.a to x OP &y. */
8391 if (TREE_CODE (arg0) == ADDR_EXPR
8392 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8393 && TREE_CODE (arg1) == ADDR_EXPR
8394 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8395 {
8396 tree cref0 = TREE_OPERAND (arg0, 0);
8397 tree cref1 = TREE_OPERAND (arg1, 0);
8398 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8399 {
8400 tree op0 = TREE_OPERAND (cref0, 0);
8401 tree op1 = TREE_OPERAND (cref1, 0);
8402 return fold_build2 (code, type,
8403 build_fold_addr_expr (op0),
8404 build_fold_addr_expr (op1));
8405 }
8406 }
8407
8408 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8409 into a single range test. */
8410 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8411 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8412 && TREE_CODE (arg1) == INTEGER_CST
8413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8414 && !integer_zerop (TREE_OPERAND (arg0, 1))
8415 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8416 && !TREE_OVERFLOW (arg1))
8417 {
8418 tem = fold_div_compare (code, type, arg0, arg1);
8419 if (tem != NULL_TREE)
8420 return tem;
8421 }
8422
8423 return NULL_TREE;
8424}
8425
8426
8427/* Subroutine of fold_binary. Optimize complex multiplications of the
8428 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8429 argument EXPR represents the expression "z" of type TYPE. */
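/* This relies on the identity z * conj(z) = (a + bi) * (a - bi)
   = a*a + b*b, whose imaginary part is zero.  */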
8430
8431static tree
8432fold_mult_zconjz (tree type, tree expr)
8433{
8434 tree itype = TREE_TYPE (type);
8435 tree rpart, ipart, tem;
8436
8437 if (TREE_CODE (expr) == COMPLEX_EXPR)
8438 {
8439 rpart = TREE_OPERAND (expr, 0);
8440 ipart = TREE_OPERAND (expr, 1);
8441 }
8442 else if (TREE_CODE (expr) == COMPLEX_CST)
8443 {
8444 rpart = TREE_REALPART (expr);
8445 ipart = TREE_IMAGPART (expr);
8446 }
8447 else
8448 {
8449 expr = save_expr (expr);
8450 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8451 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8452 }
8453
8454 rpart = save_expr (rpart);
8455 ipart = save_expr (ipart);
8456 tem = fold_build2 (PLUS_EXPR, itype,
8457 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8458 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8459 return fold_build2 (COMPLEX_EXPR, type, tem,
8460 fold_convert (itype, integer_zero_node));
8461}
8462
8463
8464/* Fold a binary expression of code CODE and type TYPE with operands
8465 OP0 and OP1. Return the folded expression if folding is
8466 successful. Otherwise, return NULL_TREE. */
8467
8468tree
8469fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8470{
8471 enum tree_code_class kind = TREE_CODE_CLASS (code);
8472 tree arg0, arg1, tem;
8473 tree t1 = NULL_TREE;
8474 bool strict_overflow_p;
8475
8476 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8477 && TREE_CODE_LENGTH (code) == 2
8478 && op0 != NULL_TREE
8479 && op1 != NULL_TREE);
8480
8481 arg0 = op0;
8482 arg1 = op1;
8483
8484 /* Strip any conversions that don't change the mode. This is
8485 safe for every expression, except for a comparison expression
8486 because its signedness is derived from its operands. So, in
8487 the latter case, only strip conversions that don't change the
8488 signedness.
8489
8490 Note that this is done as an internal manipulation within the
8491 constant folder, in order to find the simplest representation
8492 of the arguments so that their form can be studied. In any
8493   case, the appropriate type conversions should be put back in
8494 the tree that will get out of the constant folder. */
8495
8496 if (kind == tcc_comparison)
8497 {
8498 STRIP_SIGN_NOPS (arg0);
8499 STRIP_SIGN_NOPS (arg1);
8500 }
8501 else
8502 {
8503 STRIP_NOPS (arg0);
8504 STRIP_NOPS (arg1);
8505 }
8506
8507 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8508 constant but we can't do arithmetic on them. */
8509 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8510 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8511 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8512 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8513 {
8514 if (kind == tcc_binary)
8515 tem = const_binop (code, arg0, arg1, 0);
8516 else if (kind == tcc_comparison)
8517 tem = fold_relational_const (code, type, arg0, arg1);
8518 else
8519 tem = NULL_TREE;
8520
8521 if (tem != NULL_TREE)
8522 {
8523 if (TREE_TYPE (tem) != type)
8524 tem = fold_convert (type, tem);
8525 return tem;
8526 }
8527 }
8528
8529 /* If this is a commutative operation, and ARG0 is a constant, move it
8530 to ARG1 to reduce the number of tests below. */
8531 if (commutative_tree_code (code)
8532 && tree_swap_operands_p (arg0, arg1, true))
8533 return fold_build2 (code, type, op1, op0);
8534
8535 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8536
8537 First check for cases where an arithmetic operation is applied to a
8538 compound, conditional, or comparison operation. Push the arithmetic
8539 operation inside the compound or conditional to see if any folding
8540 can then be done. Convert comparison to conditional for this purpose.
8541 The also optimizes non-constant cases that used to be done in
8542   This also optimizes non-constant cases that used to be done in
8543
8544   Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
8545   one of the operands is a comparison and the other is a comparison, a
8546 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8547 code below would make the expression more complex. Change it to a
8548 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8549 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8550
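  /* For example, (a < b) & (c < d) becomes a TRUTH_AND_EXPR of the two
     comparisons, and (a < b) != (c < d) becomes their TRUTH_XOR_EXPR.  */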
8551 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8552 || code == EQ_EXPR || code == NE_EXPR)
8553 && ((truth_value_p (TREE_CODE (arg0))
8554 && (truth_value_p (TREE_CODE (arg1))
8555 || (TREE_CODE (arg1) == BIT_AND_EXPR
8556 && integer_onep (TREE_OPERAND (arg1, 1)))))
8557 || (truth_value_p (TREE_CODE (arg1))
8558 && (truth_value_p (TREE_CODE (arg0))
8559 || (TREE_CODE (arg0) == BIT_AND_EXPR
8560 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8561 {
8562 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8563 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8564 : TRUTH_XOR_EXPR,
8565 boolean_type_node,
8566 fold_convert (boolean_type_node, arg0),
8567 fold_convert (boolean_type_node, arg1));
8568
8569 if (code == EQ_EXPR)
8570 tem = invert_truthvalue (tem);
8571
8572 return fold_convert (type, tem);
8573 }
8574
8575 if (TREE_CODE_CLASS (code) == tcc_binary
8576 || TREE_CODE_CLASS (code) == tcc_comparison)
8577 {
8578 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8579 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8580 fold_build2 (code, type,
8581 TREE_OPERAND (arg0, 1), op1));
8582 if (TREE_CODE (arg1) == COMPOUND_EXPR
8583 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8584 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8585 fold_build2 (code, type,
8586 op0, TREE_OPERAND (arg1, 1)));
8587
8588 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8589 {
8590 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8591 arg0, arg1,
8592 /*cond_first_p=*/1);
8593 if (tem != NULL_TREE)
8594 return tem;
8595 }
8596
8597 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8598 {
8599 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8600 arg1, arg0,
8601 /*cond_first_p=*/0);
8602 if (tem != NULL_TREE)
8603 return tem;
8604 }
8605 }
8606
8607 switch (code)
8608 {
8609 case PLUS_EXPR:
8610 /* A + (-B) -> A - B */
8611 if (TREE_CODE (arg1) == NEGATE_EXPR)
8612 return fold_build2 (MINUS_EXPR, type,
8613 fold_convert (type, arg0),
8614 fold_convert (type, TREE_OPERAND (arg1, 0)));
8615 /* (-A) + B -> B - A */
8616 if (TREE_CODE (arg0) == NEGATE_EXPR
8617 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8618 return fold_build2 (MINUS_EXPR, type,
8619 fold_convert (type, arg1),
8620 fold_convert (type, TREE_OPERAND (arg0, 0)));
8621 /* Convert ~A + 1 to -A. */
8622 if (INTEGRAL_TYPE_P (type)
8623 && TREE_CODE (arg0) == BIT_NOT_EXPR
8624 && integer_onep (arg1))
8625 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8626
8627 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8628 same or one. */
8629 if ((TREE_CODE (arg0) == MULT_EXPR
8630 || TREE_CODE (arg1) == MULT_EXPR)
8631 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8632 {
8633 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8634 if (tem)
8635 return tem;
8636 }
8637
8638 if (! FLOAT_TYPE_P (type))
8639 {
8640 if (integer_zerop (arg1))
8641 return non_lvalue (fold_convert (type, arg0));
8642
8643 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8644 with a constant, and the two constants have no bits in common,
8645 we should treat this as a BIT_IOR_EXPR since this may produce more
8646 simplifications. */
8647 if (TREE_CODE (arg0) == BIT_AND_EXPR
8648 && TREE_CODE (arg1) == BIT_AND_EXPR
8649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8650 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8651 && integer_zerop (const_binop (BIT_AND_EXPR,
8652 TREE_OPERAND (arg0, 1),
8653 TREE_OPERAND (arg1, 1), 0)))
8654 {
8655 code = BIT_IOR_EXPR;
8656 goto bit_ior;
8657 }
8658
8659 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8660 (plus (plus (mult) (mult)) (foo)) so that we can
8661 take advantage of the factoring cases below. */
8662 if (((TREE_CODE (arg0) == PLUS_EXPR
8663 || TREE_CODE (arg0) == MINUS_EXPR)
8664 && TREE_CODE (arg1) == MULT_EXPR)
8665 || ((TREE_CODE (arg1) == PLUS_EXPR
8666 || TREE_CODE (arg1) == MINUS_EXPR)
8667 && TREE_CODE (arg0) == MULT_EXPR))
8668 {
8669 tree parg0, parg1, parg, marg;
8670 enum tree_code pcode;
8671
8672 if (TREE_CODE (arg1) == MULT_EXPR)
8673 parg = arg0, marg = arg1;
8674 else
8675 parg = arg1, marg = arg0;
8676 pcode = TREE_CODE (parg);
8677 parg0 = TREE_OPERAND (parg, 0);
8678 parg1 = TREE_OPERAND (parg, 1);
8679 STRIP_NOPS (parg0);
8680 STRIP_NOPS (parg1);
8681
8682 if (TREE_CODE (parg0) == MULT_EXPR
8683 && TREE_CODE (parg1) != MULT_EXPR)
8684 return fold_build2 (pcode, type,
8685 fold_build2 (PLUS_EXPR, type,
8686 fold_convert (type, parg0),
8687 fold_convert (type, marg)),
8688 fold_convert (type, parg1));
8689 if (TREE_CODE (parg0) != MULT_EXPR
8690 && TREE_CODE (parg1) == MULT_EXPR)
8691 return fold_build2 (PLUS_EXPR, type,
8692 fold_convert (type, parg0),
8693 fold_build2 (pcode, type,
8694 fold_convert (type, marg),
8695 fold_convert (type,
8696 parg1)));
8697 }
8698
 8699	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
 8700	     step of the array.  The loop optimizer sometimes produces
 8701	     expressions of this form.  */
8702 if (TREE_CODE (arg0) == ADDR_EXPR)
8703 {
8704 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8705 if (tem)
8706 return fold_convert (type, tem);
8707 }
8708 else if (TREE_CODE (arg1) == ADDR_EXPR)
8709 {
8710 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8711 if (tem)
8712 return fold_convert (type, tem);
8713 }
8714 }
8715 else
8716 {
8717 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8718 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8719 return non_lvalue (fold_convert (type, arg0));
8720
8721 /* Likewise if the operands are reversed. */
8722 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8723 return non_lvalue (fold_convert (type, arg1));
8724
8725 /* Convert X + -C into X - C. */
8726 if (TREE_CODE (arg1) == REAL_CST
8727 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8728 {
8729 tem = fold_negate_const (arg1, type);
8730 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8731 return fold_build2 (MINUS_EXPR, type,
8732 fold_convert (type, arg0),
8733 fold_convert (type, tem));
8734 }
8735
8736 if (flag_unsafe_math_optimizations
8737 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8738 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8739 && (tem = distribute_real_division (code, type, arg0, arg1)))
8740 return tem;
8741
8742 /* Convert x+x into x*2.0. */
8743 if (operand_equal_p (arg0, arg1, 0)
8744 && SCALAR_FLOAT_TYPE_P (type))
8745 return fold_build2 (MULT_EXPR, type, arg0,
8746 build_real (type, dconst2));
8747
8748 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8749 if (flag_unsafe_math_optimizations
8750 && TREE_CODE (arg1) == PLUS_EXPR
8751 && TREE_CODE (arg0) != MULT_EXPR)
8752 {
8753 tree tree10 = TREE_OPERAND (arg1, 0);
8754 tree tree11 = TREE_OPERAND (arg1, 1);
8755 if (TREE_CODE (tree11) == MULT_EXPR
8756 && TREE_CODE (tree10) == MULT_EXPR)
8757 {
8758 tree tree0;
8759 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8760 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8761 }
8762 }
 8763	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
8764 if (flag_unsafe_math_optimizations
8765 && TREE_CODE (arg0) == PLUS_EXPR
8766 && TREE_CODE (arg1) != MULT_EXPR)
8767 {
8768 tree tree00 = TREE_OPERAND (arg0, 0);
8769 tree tree01 = TREE_OPERAND (arg0, 1);
8770 if (TREE_CODE (tree01) == MULT_EXPR
8771 && TREE_CODE (tree00) == MULT_EXPR)
8772 {
8773 tree tree0;
8774 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8775 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8776 }
8777 }
8778 }
8779
8780 bit_rotate:
 8781	  /* (A << C1) + (A >> C2), where A is unsigned and C1 + C2 is the
 8782	     bit width of A, is a rotate of A by C1 bits.  */
 8783	  /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the
 8784	     bit width of A, is a rotate of A by B bits.  */
8785 {
8786 enum tree_code code0, code1;
8787 code0 = TREE_CODE (arg0);
8788 code1 = TREE_CODE (arg1);
8789 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8790 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8791 && operand_equal_p (TREE_OPERAND (arg0, 0),
8792 TREE_OPERAND (arg1, 0), 0)
8793 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8794 {
8795 tree tree01, tree11;
8796 enum tree_code code01, code11;
8797
8798 tree01 = TREE_OPERAND (arg0, 1);
8799 tree11 = TREE_OPERAND (arg1, 1);
8800 STRIP_NOPS (tree01);
8801 STRIP_NOPS (tree11);
8802 code01 = TREE_CODE (tree01);
8803 code11 = TREE_CODE (tree11);
8804 if (code01 == INTEGER_CST
8805 && code11 == INTEGER_CST
8806 && TREE_INT_CST_HIGH (tree01) == 0
8807 && TREE_INT_CST_HIGH (tree11) == 0
8808 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8809 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8810 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8811 code0 == LSHIFT_EXPR ? tree01 : tree11);
8812 else if (code11 == MINUS_EXPR)
8813 {
8814 tree tree110, tree111;
8815 tree110 = TREE_OPERAND (tree11, 0);
8816 tree111 = TREE_OPERAND (tree11, 1);
8817 STRIP_NOPS (tree110);
8818 STRIP_NOPS (tree111);
8819 if (TREE_CODE (tree110) == INTEGER_CST
8820 && 0 == compare_tree_int (tree110,
8821 TYPE_PRECISION
8822 (TREE_TYPE (TREE_OPERAND
8823 (arg0, 0))))
8824 && operand_equal_p (tree01, tree111, 0))
8825 return build2 ((code0 == LSHIFT_EXPR
8826 ? LROTATE_EXPR
8827 : RROTATE_EXPR),
8828 type, TREE_OPERAND (arg0, 0), tree01);
8829 }
8830 else if (code01 == MINUS_EXPR)
8831 {
8832 tree tree010, tree011;
8833 tree010 = TREE_OPERAND (tree01, 0);
8834 tree011 = TREE_OPERAND (tree01, 1);
8835 STRIP_NOPS (tree010);
8836 STRIP_NOPS (tree011);
8837 if (TREE_CODE (tree010) == INTEGER_CST
8838 && 0 == compare_tree_int (tree010,
8839 TYPE_PRECISION
8840 (TREE_TYPE (TREE_OPERAND
8841 (arg0, 0))))
8842 && operand_equal_p (tree11, tree011, 0))
8843 return build2 ((code0 != LSHIFT_EXPR
8844 ? LROTATE_EXPR
8845 : RROTATE_EXPR),
8846 type, TREE_OPERAND (arg0, 0), tree11);
8847 }
8848 }
8849 }
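      /* Illustration (added, not part of the original file): for a
	 32-bit unsigned A, the source idiom recognized above,
	   (a << b) | (a >> (32 - b))
	 written with either | or +, folds into a single LROTATE_EXPR
	 of A by B.  */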
8850
8851 associate:
 8852	  /* In most languages we cannot reassociate operations on floats
 8853	     across parentheses.  Rather than remember where the parentheses
 8854	     were, we don't associate floats at all, unless the user has
 8855	     specified -funsafe-math-optimizations.  */
8856
8857 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8858 {
8859 tree var0, con0, lit0, minus_lit0;
8860 tree var1, con1, lit1, minus_lit1;
8861 bool ok = true;
8862
8863 /* Split both trees into variables, constants, and literals. Then
8864 associate each group together, the constants with literals,
8865 then the result with variables. This increases the chances of
8866 literals being recombined later and of generating relocatable
8867 expressions for the sum of a constant and literal. */
8868 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8869 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8870 code == MINUS_EXPR);
8871
8872 /* With undefined overflow we can only associate constants
8873 with one variable. */
8874 if ((POINTER_TYPE_P (type)
8875 || (INTEGRAL_TYPE_P (type)
8876 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8877 && var0 && var1)
8878 {
8879 tree tmp0 = var0;
8880 tree tmp1 = var1;
8881
8882 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8883 tmp0 = TREE_OPERAND (tmp0, 0);
8884 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8885 tmp1 = TREE_OPERAND (tmp1, 0);
8886 /* The only case we can still associate with two variables
8887 is if they are the same, modulo negation. */
8888 if (!operand_equal_p (tmp0, tmp1, 0))
8889 ok = false;
8890 }
8891
8892 /* Only do something if we found more than two objects. Otherwise,
8893 nothing has changed and we risk infinite recursion. */
8894 if (ok
8895 && (2 < ((var0 != 0) + (var1 != 0)
8896 + (con0 != 0) + (con1 != 0)
8897 + (lit0 != 0) + (lit1 != 0)
8898 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8899 {
8900 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8901 if (code == MINUS_EXPR)
8902 code = PLUS_EXPR;
8903
8904 var0 = associate_trees (var0, var1, code, type);
8905 con0 = associate_trees (con0, con1, code, type);
8906 lit0 = associate_trees (lit0, lit1, code, type);
8907 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8908
8909 /* Preserve the MINUS_EXPR if the negative part of the literal is
8910 greater than the positive part. Otherwise, the multiplicative
 8911	     folding code (i.e. extract_muldiv) may be fooled when
 8912	     unsigned constants are subtracted, as in the following
8913 example: ((X*2 + 4) - 8U)/2. */
8914 if (minus_lit0 && lit0)
8915 {
8916 if (TREE_CODE (lit0) == INTEGER_CST
8917 && TREE_CODE (minus_lit0) == INTEGER_CST
8918 && tree_int_cst_lt (lit0, minus_lit0))
8919 {
8920 minus_lit0 = associate_trees (minus_lit0, lit0,
8921 MINUS_EXPR, type);
8922 lit0 = 0;
8923 }
8924 else
8925 {
8926 lit0 = associate_trees (lit0, minus_lit0,
8927 MINUS_EXPR, type);
8928 minus_lit0 = 0;
8929 }
8930 }
8931 if (minus_lit0)
8932 {
8933 if (con0 == 0)
8934 return fold_convert (type,
8935 associate_trees (var0, minus_lit0,
8936 MINUS_EXPR, type));
8937 else
8938 {
8939 con0 = associate_trees (con0, minus_lit0,
8940 MINUS_EXPR, type);
8941 return fold_convert (type,
8942 associate_trees (var0, con0,
8943 PLUS_EXPR, type));
8944 }
8945 }
8946
8947 con0 = associate_trees (con0, lit0, code, type);
8948 return fold_convert (type, associate_trees (var0, con0,
8949 code, type));
8950 }
8951 }
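      /* Worked example (added): for (x + 1) + 2, split_tree yields
	 var0 == x and lit0 == 1 from the first operand and lit1 == 2
	 from the second.  Three objects were found, so the literals
	 are combined first and the expression refolds to x + 3.  */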
8952
8953 return NULL_TREE;
8954
8955 case MINUS_EXPR:
8956 /* A - (-B) -> A + B */
8957 if (TREE_CODE (arg1) == NEGATE_EXPR)
8958 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8959 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8960 if (TREE_CODE (arg0) == NEGATE_EXPR
8961 && (FLOAT_TYPE_P (type)
8962 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8963 && negate_expr_p (arg1)
8964 && reorder_operands_p (arg0, arg1))
8965 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8966 TREE_OPERAND (arg0, 0));
8967 /* Convert -A - 1 to ~A. */
8968 if (INTEGRAL_TYPE_P (type)
8969 && TREE_CODE (arg0) == NEGATE_EXPR
8970 && integer_onep (arg1))
8971 return fold_build1 (BIT_NOT_EXPR, type,
8972 fold_convert (type, TREE_OPERAND (arg0, 0)));
8973
8974 /* Convert -1 - A to ~A. */
8975 if (INTEGRAL_TYPE_P (type)
8976 && integer_all_onesp (arg0))
8977 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8978
8979 if (! FLOAT_TYPE_P (type))
8980 {
8981 if (integer_zerop (arg0))
8982 return negate_expr (fold_convert (type, arg1));
8983 if (integer_zerop (arg1))
8984 return non_lvalue (fold_convert (type, arg0));
8985
8986 /* Fold A - (A & B) into ~B & A. */
8987 if (!TREE_SIDE_EFFECTS (arg0)
8988 && TREE_CODE (arg1) == BIT_AND_EXPR)
8989 {
8990 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8991 return fold_build2 (BIT_AND_EXPR, type,
8992 fold_build1 (BIT_NOT_EXPR, type,
8993 TREE_OPERAND (arg1, 0)),
8994 arg0);
8995 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8996 return fold_build2 (BIT_AND_EXPR, type,
8997 fold_build1 (BIT_NOT_EXPR, type,
8998 TREE_OPERAND (arg1, 1)),
8999 arg0);
9000 }
9001
9002 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9003 any power of 2 minus 1. */
9004 if (TREE_CODE (arg0) == BIT_AND_EXPR
9005 && TREE_CODE (arg1) == BIT_AND_EXPR
9006 && operand_equal_p (TREE_OPERAND (arg0, 0),
9007 TREE_OPERAND (arg1, 0), 0))
9008 {
9009 tree mask0 = TREE_OPERAND (arg0, 1);
9010 tree mask1 = TREE_OPERAND (arg1, 1);
9011 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9012
9013 if (operand_equal_p (tem, mask1, 0))
9014 {
9015 tem = fold_build2 (BIT_XOR_EXPR, type,
9016 TREE_OPERAND (arg0, 0), mask1);
9017 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9018 }
9019 }
9020 }
9021
9022 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9023 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9024 return non_lvalue (fold_convert (type, arg0));
9025
9026 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9027 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9028 (-ARG1 + ARG0) reduces to -ARG1. */
9029 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9030 return negate_expr (fold_convert (type, arg1));
9031
9032 /* Fold &x - &x. This can happen from &x.foo - &x.
9033 This is unsafe for certain floats even in non-IEEE formats.
 9034	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9035 Also note that operand_equal_p is always false if an operand
9036 is volatile. */
9037
9038 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9039 && operand_equal_p (arg0, arg1, 0))
9040 return fold_convert (type, integer_zero_node);
9041
9042 /* A - B -> A + (-B) if B is easily negatable. */
9043 if (negate_expr_p (arg1)
9044 && ((FLOAT_TYPE_P (type)
9045 /* Avoid this transformation if B is a positive REAL_CST. */
9046 && (TREE_CODE (arg1) != REAL_CST
9047 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9048 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9049 return fold_build2 (PLUS_EXPR, type,
9050 fold_convert (type, arg0),
9051 fold_convert (type, negate_expr (arg1)));
9052
9053 /* Try folding difference of addresses. */
9054 {
9055 HOST_WIDE_INT diff;
9056
9057 if ((TREE_CODE (arg0) == ADDR_EXPR
9058 || TREE_CODE (arg1) == ADDR_EXPR)
9059 && ptr_difference_const (arg0, arg1, &diff))
9060 return build_int_cst_type (type, diff);
9061 }
9062
9063 /* Fold &a[i] - &a[j] to i-j. */
9064 if (TREE_CODE (arg0) == ADDR_EXPR
9065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9066 && TREE_CODE (arg1) == ADDR_EXPR
9067 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9068 {
9069 tree aref0 = TREE_OPERAND (arg0, 0);
9070 tree aref1 = TREE_OPERAND (arg1, 0);
9071 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9072 TREE_OPERAND (aref1, 0), 0))
9073 {
9074 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9075 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9076 tree esz = array_ref_element_size (aref0);
9077 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9078 return fold_build2 (MULT_EXPR, type, diff,
9079 fold_convert (type, esz));
9080
9081 }
9082 }
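	  /* Worked example (added): at this level &a[7] - &a[3] is a
	     byte difference, so it folds to (7 - 3) * sizeof (a[0]).  */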
9083
 9084	  /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
 9085	     step of the array.  The loop optimizer sometimes produces
 9086	     expressions of this form.  */
9087 if (TREE_CODE (arg0) == ADDR_EXPR)
9088 {
9089 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9090 if (tem)
9091 return fold_convert (type, tem);
9092 }
9093
9094 if (flag_unsafe_math_optimizations
9095 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9096 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9097 && (tem = distribute_real_division (code, type, arg0, arg1)))
9098 return tem;
9099
9100 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9101 same or one. */
9102 if ((TREE_CODE (arg0) == MULT_EXPR
9103 || TREE_CODE (arg1) == MULT_EXPR)
9104 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9105 {
9106 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9107 if (tem)
9108 return tem;
9109 }
9110
9111 goto associate;
9112
9113 case MULT_EXPR:
9114 /* (-A) * (-B) -> A * B */
9115 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9116 return fold_build2 (MULT_EXPR, type,
9117 fold_convert (type, TREE_OPERAND (arg0, 0)),
9118 fold_convert (type, negate_expr (arg1)));
9119 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9120 return fold_build2 (MULT_EXPR, type,
9121 fold_convert (type, negate_expr (arg0)),
9122 fold_convert (type, TREE_OPERAND (arg1, 0)));
9123
9124 if (! FLOAT_TYPE_P (type))
9125 {
9126 if (integer_zerop (arg1))
9127 return omit_one_operand (type, arg1, arg0);
9128 if (integer_onep (arg1))
9129 return non_lvalue (fold_convert (type, arg0));
9130 /* Transform x * -1 into -x. */
9131 if (integer_all_onesp (arg1))
9132 return fold_convert (type, negate_expr (arg0));
9133
9134 /* (a * (1 << b)) is (a << b) */
9135 if (TREE_CODE (arg1) == LSHIFT_EXPR
9136 && integer_onep (TREE_OPERAND (arg1, 0)))
9137 return fold_build2 (LSHIFT_EXPR, type, arg0,
9138 TREE_OPERAND (arg1, 1));
9139 if (TREE_CODE (arg0) == LSHIFT_EXPR
9140 && integer_onep (TREE_OPERAND (arg0, 0)))
9141 return fold_build2 (LSHIFT_EXPR, type, arg1,
9142 TREE_OPERAND (arg0, 1));
9143
9144 strict_overflow_p = false;
9145 if (TREE_CODE (arg1) == INTEGER_CST
9146 && 0 != (tem = extract_muldiv (op0,
9147 fold_convert (type, arg1),
9148 code, NULL_TREE,
9149 &strict_overflow_p)))
9150 {
9151 if (strict_overflow_p)
9152 fold_overflow_warning (("assuming signed overflow does not "
9153 "occur when simplifying "
9154 "multiplication"),
9155 WARN_STRICT_OVERFLOW_MISC);
9156 return fold_convert (type, tem);
9157 }
9158
9159 /* Optimize z * conj(z) for integer complex numbers. */
9160 if (TREE_CODE (arg0) == CONJ_EXPR
9161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9162 return fold_mult_zconjz (type, arg1);
9163 if (TREE_CODE (arg1) == CONJ_EXPR
9164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9165 return fold_mult_zconjz (type, arg0);
9166 }
9167 else
9168 {
9169 /* Maybe fold x * 0 to 0. The expressions aren't the same
9170 when x is NaN, since x * 0 is also NaN. Nor are they the
9171 same in modes with signed zeros, since multiplying a
9172 negative value by 0 gives -0, not +0. */
9173 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9174 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9175 && real_zerop (arg1))
9176 return omit_one_operand (type, arg1, arg0);
9177 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9178 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9179 && real_onep (arg1))
9180 return non_lvalue (fold_convert (type, arg0));
9181
9182 /* Transform x * -1.0 into -x. */
9183 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9184 && real_minus_onep (arg1))
9185 return fold_convert (type, negate_expr (arg0));
9186
9187 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9188 if (flag_unsafe_math_optimizations
9189 && TREE_CODE (arg0) == RDIV_EXPR
9190 && TREE_CODE (arg1) == REAL_CST
9191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9192 {
9193 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9194 arg1, 0);
9195 if (tem)
9196 return fold_build2 (RDIV_EXPR, type, tem,
9197 TREE_OPERAND (arg0, 1));
9198 }
9199
9200 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9201 if (operand_equal_p (arg0, arg1, 0))
9202 {
9203 tree tem = fold_strip_sign_ops (arg0);
9204 if (tem != NULL_TREE)
9205 {
9206 tem = fold_convert (type, tem);
9207 return fold_build2 (MULT_EXPR, type, tem, tem);
9208 }
9209 }
9210
9211 /* Optimize z * conj(z) for floating point complex numbers.
9212 Guarded by flag_unsafe_math_optimizations as non-finite
9213 imaginary components don't produce scalar results. */
9214 if (flag_unsafe_math_optimizations
9215 && TREE_CODE (arg0) == CONJ_EXPR
9216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9217 return fold_mult_zconjz (type, arg1);
9218 if (flag_unsafe_math_optimizations
9219 && TREE_CODE (arg1) == CONJ_EXPR
9220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9221 return fold_mult_zconjz (type, arg0);
9222
9223 if (flag_unsafe_math_optimizations)
9224 {
9225 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9226 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9227
9228 /* Optimizations of root(...)*root(...). */
9229 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9230 {
9231 tree rootfn, arg, arglist;
9232 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9233 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9234
9235 /* Optimize sqrt(x)*sqrt(x) as x. */
9236 if (BUILTIN_SQRT_P (fcode0)
9237 && operand_equal_p (arg00, arg10, 0)
9238 && ! HONOR_SNANS (TYPE_MODE (type)))
9239 return arg00;
9240
9241 /* Optimize root(x)*root(y) as root(x*y). */
9242 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9243 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9244 arglist = build_tree_list (NULL_TREE, arg);
9245 return build_function_call_expr (rootfn, arglist);
9246 }
9247
9248 /* Optimize expN(x)*expN(y) as expN(x+y). */
9249 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9250 {
9251 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9252 tree arg = fold_build2 (PLUS_EXPR, type,
9253 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9254 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9255 tree arglist = build_tree_list (NULL_TREE, arg);
9256 return build_function_call_expr (expfn, arglist);
9257 }
9258
9259 /* Optimizations of pow(...)*pow(...). */
9260 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9261 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9262 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9263 {
9264 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9265 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9266 1)));
9267 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9268 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9269 1)));
9270
9271 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9272 if (operand_equal_p (arg01, arg11, 0))
9273 {
9274 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9275 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9276 tree arglist = tree_cons (NULL_TREE, arg,
9277 build_tree_list (NULL_TREE,
9278 arg01));
9279 return build_function_call_expr (powfn, arglist);
9280 }
9281
9282 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9283 if (operand_equal_p (arg00, arg10, 0))
9284 {
9285 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9286 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9287 tree arglist = tree_cons (NULL_TREE, arg00,
9288 build_tree_list (NULL_TREE,
9289 arg));
9290 return build_function_call_expr (powfn, arglist);
9291 }
9292 }
9293
9294 /* Optimize tan(x)*cos(x) as sin(x). */
9295 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9296 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9297 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9298 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9299 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9300 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9301 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9302 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9303 {
9304 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9305
9306 if (sinfn != NULL_TREE)
9307 return build_function_call_expr (sinfn,
9308 TREE_OPERAND (arg0, 1));
9309 }
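	      /* Worked example (added): tan(x)*cos(x) is
		 (sin(x)/cos(x))*cos(x) == sin(x).  This is valid only
		 under -funsafe-math-optimizations, since it changes the
		 result where cos(x) == 0 or x is non-finite.  */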
9310
9311 /* Optimize x*pow(x,c) as pow(x,c+1). */
9312 if (fcode1 == BUILT_IN_POW
9313 || fcode1 == BUILT_IN_POWF
9314 || fcode1 == BUILT_IN_POWL)
9315 {
9316 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9317 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9318 1)));
9319 if (TREE_CODE (arg11) == REAL_CST
9320 && ! TREE_CONSTANT_OVERFLOW (arg11)
9321 && operand_equal_p (arg0, arg10, 0))
9322 {
9323 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9324 REAL_VALUE_TYPE c;
9325 tree arg, arglist;
9326
9327 c = TREE_REAL_CST (arg11);
9328 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9329 arg = build_real (type, c);
9330 arglist = build_tree_list (NULL_TREE, arg);
9331 arglist = tree_cons (NULL_TREE, arg0, arglist);
9332 return build_function_call_expr (powfn, arglist);
9333 }
9334 }
9335
9336 /* Optimize pow(x,c)*x as pow(x,c+1). */
9337 if (fcode0 == BUILT_IN_POW
9338 || fcode0 == BUILT_IN_POWF
9339 || fcode0 == BUILT_IN_POWL)
9340 {
9341 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9342 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9343 1)));
9344 if (TREE_CODE (arg01) == REAL_CST
9345 && ! TREE_CONSTANT_OVERFLOW (arg01)
9346 && operand_equal_p (arg1, arg00, 0))
9347 {
9348 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9349 REAL_VALUE_TYPE c;
9350 tree arg, arglist;
9351
9352 c = TREE_REAL_CST (arg01);
9353 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9354 arg = build_real (type, c);
9355 arglist = build_tree_list (NULL_TREE, arg);
9356 arglist = tree_cons (NULL_TREE, arg1, arglist);
9357 return build_function_call_expr (powfn, arglist);
9358 }
9359 }
9360
9361 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9362 if (! optimize_size
9363 && operand_equal_p (arg0, arg1, 0))
9364 {
9365 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9366
9367 if (powfn)
9368 {
9369 tree arg = build_real (type, dconst2);
9370 tree arglist = build_tree_list (NULL_TREE, arg);
9371 arglist = tree_cons (NULL_TREE, arg0, arglist);
9372 return build_function_call_expr (powfn, arglist);
9373 }
9374 }
9375 }
9376 }
9377 goto associate;
9378
9379 case BIT_IOR_EXPR:
9380 bit_ior:
9381 if (integer_all_onesp (arg1))
9382 return omit_one_operand (type, arg1, arg0);
9383 if (integer_zerop (arg1))
9384 return non_lvalue (fold_convert (type, arg0));
9385 if (operand_equal_p (arg0, arg1, 0))
9386 return non_lvalue (fold_convert (type, arg0));
9387
9388 /* ~X | X is -1. */
9389 if (TREE_CODE (arg0) == BIT_NOT_EXPR
98static enum comparison_code comparison_to_compcode (enum tree_code);
99static enum tree_code compcode_to_comparison (enum comparison_code);
100static tree combine_comparisons (enum tree_code, enum tree_code,
101 enum tree_code, tree, tree, tree);
102static int truth_value_p (enum tree_code);
103static int operand_equal_for_comparison_p (tree, tree, tree);
104static int twoval_comparison_p (tree, tree *, tree *, int *);
105static tree eval_subst (tree, tree, tree, tree, tree);
106static tree pedantic_omit_one_operand (tree, tree, tree);
107static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
108static tree make_bit_field_ref (tree, tree, int, int, int);
109static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
110static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
111 enum machine_mode *, int *, int *,
112 tree *, tree *);
113static int all_ones_mask_p (tree, int);
114static tree sign_bit_p (tree, tree);
115static int simple_operand_p (tree);
116static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117static tree range_predecessor (tree);
118static tree range_successor (tree);
119static tree make_range (tree, int *, tree *, tree *, bool *);
120static tree build_range_check (tree, tree, int, tree, tree);
121static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
122 tree);
123static tree fold_range_test (enum tree_code, tree, tree, tree);
124static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
125static tree unextend (tree, int, int, tree);
126static tree fold_truthop (enum tree_code, tree, tree, tree);
127static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
128static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130static int multiple_of_p (tree, tree, tree);
131static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 tree, tree,
133 tree, tree, int);
134static bool fold_real_zero_addition_p (tree, tree, int);
135static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
136 tree, tree, tree);
137static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138static tree fold_div_compare (enum tree_code, tree, tree, tree);
139static bool reorder_operands_p (tree, tree);
140static tree fold_negate_const (tree, tree);
141static tree fold_not_const (tree, tree);
142static tree fold_relational_const (enum tree_code, tree, tree, tree);
143static int native_encode_expr (tree, unsigned char *, int);
144static tree native_interpret_expr (tree, unsigned char *, int);
145
146
147/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
150 addition.
151
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 sign. */
155#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
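
/* Worked example (added): in 8-bit two's complement, a == 0x70 and
   b == 0x20 give sum == 0x90.  ~(a ^ b) == 0xaf has the sign bit set
   (a and b agree in sign) and (a ^ sum) == 0xe0 has it set as well
   (a and sum differ), so their AND is negative and the macro reports
   the overflow.  */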
156
157/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
161
162#define LOWPART(x) \
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164#define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
167
168/* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
171
172static void
173encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174{
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
179}
180
181/* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184
185static void
186decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 HOST_WIDE_INT *hi)
188{
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
191}
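
/* Added sketch (not part of the original file): a round-trip through
   encode/decode.  The half-word representation exists so that the
   partial products formed in mul_double_with_sign below always fit
   in a HOST_WIDE_INT.  */

static void ATTRIBUTE_UNUSED
encode_decode_demo (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;

  encode (words, 0x1234, 0x5678);
  decode (words, &low, &high);
  gcc_assert (low == 0x1234 && high == 0x5678);
}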
192
193/* T is an INTEGER_CST node.  OVERFLOWABLE indicates whether we care
194   about overflow of the value: when >0 we are interested only in
195   signed overflow; when <0 we are interested in any overflow.
196   OVERFLOWED indicates whether overflow has already occurred, and
197   CONST_OVERFLOWED whether constant overflow has already occurred.
198   We force T's value to be within range of T's type (by setting to
199   0 or 1 all the bits outside the type's range).  We set
200   TREE_OVERFLOW if
201     OVERFLOWED is nonzero,
202     or OVERFLOWABLE is >0 and signed overflow occurs,
203     or OVERFLOWABLE is <0 and any overflow occurs.
204   We set TREE_CONSTANT_OVERFLOW if
205     CONST_OVERFLOWED is nonzero, or we set TREE_OVERFLOW.
206   We return either the original T or a copy.  */
207
208tree
209force_fit_type (tree t, int overflowable,
210 bool overflowed, bool overflowed_const)
211{
212 unsigned HOST_WIDE_INT low;
213 HOST_WIDE_INT high;
214 unsigned int prec;
215 int sign_extended_type;
216
217 gcc_assert (TREE_CODE (t) == INTEGER_CST);
218
219 low = TREE_INT_CST_LOW (t);
220 high = TREE_INT_CST_HIGH (t);
221
222 if (POINTER_TYPE_P (TREE_TYPE (t))
223 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224 prec = POINTER_SIZE;
225 else
226 prec = TYPE_PRECISION (TREE_TYPE (t));
227 /* Size types *are* sign extended. */
228 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
229 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
230 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
231
232 /* First clear all bits that are beyond the type's precision. */
233
234 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 ;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
237 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
238 else
239 {
240 high = 0;
241 if (prec < HOST_BITS_PER_WIDE_INT)
242 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 }
244
245 if (!sign_extended_type)
246 /* No sign extension */;
247 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
248 /* Correct width already. */;
249 else if (prec > HOST_BITS_PER_WIDE_INT)
250 {
251 /* Sign extend top half? */
252 if (high & ((unsigned HOST_WIDE_INT)1
253 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
254 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
255 }
256 else if (prec == HOST_BITS_PER_WIDE_INT)
257 {
258 if ((HOST_WIDE_INT)low < 0)
259 high = -1;
260 }
261 else
262 {
263 /* Sign extend bottom half? */
264 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 {
266 high = -1;
267 low |= (HOST_WIDE_INT)(-1) << prec;
268 }
269 }
270
271 /* If the value changed, return a new node. */
272 if (overflowed || overflowed_const
273 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
274 {
275 t = build_int_cst_wide (TREE_TYPE (t), low, high);
276
277 if (overflowed
278 || overflowable < 0
279 || (overflowable > 0 && sign_extended_type))
280 {
281 t = copy_node (t);
282 TREE_OVERFLOW (t) = 1;
283 TREE_CONSTANT_OVERFLOW (t) = 1;
284 }
285 else if (overflowed_const)
286 {
287 t = copy_node (t);
288 TREE_CONSTANT_OVERFLOW (t) = 1;
289 }
290 }
291
292 return t;
293}
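
/* Example (added): for an 8-bit unsigned type, a constant whose low
   word holds 0x1ff is masked down to 0xff.  Since the value changed,
   a new node is returned; whether TREE_OVERFLOW is also set depends
   on OVERFLOWABLE and the incoming overflow flags, as described
   above.  */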
294
295/* Add two doubleword integers with doubleword result.
296 Return nonzero if the operation overflows according to UNSIGNED_P.
297 Each argument is given as two `HOST_WIDE_INT' pieces.
298 One argument is L1 and H1; the other, L2 and H2.
299 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300
301int
302add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
303 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
304 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
305 bool unsigned_p)
306{
307 unsigned HOST_WIDE_INT l;
308 HOST_WIDE_INT h;
309
310 l = l1 + l2;
311 h = h1 + h2 + (l < l1);
312
313 *lv = l;
314 *hv = h;
315
316 if (unsigned_p)
317 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
318 else
319 return OVERFLOW_SUM_SIGN (h1, h2, h);
320}
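
/* Worked example (added): adding the doubleword value -1
   (l1 == ~0, h1 == -1) to +1 gives l == 0 with a carry out of the
   low word, so h == -1 + 0 + 1 == 0.  Interpreted as signed this is
   -1 + 1 == 0 with no overflow; with UNSIGNED_P set the same bits
   are the maximal value wrapping to zero, so overflow is reported.  */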
321
322/* Negate a doubleword integer with doubleword result.
323 Return nonzero if the operation overflows, assuming it's signed.
324 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
325 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326
327int
328neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
329 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
330{
331 if (l1 == 0)
332 {
333 *lv = 0;
334 *hv = - h1;
335 return (*hv & h1) < 0;
336 }
337 else
338 {
339 *lv = -l1;
340 *hv = ~h1;
341 return 0;
342 }
343}
344
345/* Multiply two doubleword integers with doubleword result.
346 Return nonzero if the operation overflows according to UNSIGNED_P.
347 Each argument is given as two `HOST_WIDE_INT' pieces.
348 One argument is L1 and H1; the other, L2 and H2.
349 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350
351int
352mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
353 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
354 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
355 bool unsigned_p)
356{
357 HOST_WIDE_INT arg1[4];
358 HOST_WIDE_INT arg2[4];
359 HOST_WIDE_INT prod[4 * 2];
360 unsigned HOST_WIDE_INT carry;
361 int i, j, k;
362 unsigned HOST_WIDE_INT toplow, neglow;
363 HOST_WIDE_INT tophigh, neghigh;
364
365 encode (arg1, l1, h1);
366 encode (arg2, l2, h2);
367
368 memset (prod, 0, sizeof prod);
369
370 for (i = 0; i < 4; i++)
371 {
372 carry = 0;
373 for (j = 0; j < 4; j++)
374 {
375 k = i + j;
376 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
377 carry += arg1[i] * arg2[j];
378 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
379 carry += prod[k];
380 prod[k] = LOWPART (carry);
381 carry = HIGHPART (carry);
382 }
383 prod[i + 4] = carry;
384 }
385
386 decode (prod, lv, hv);
387 decode (prod + 4, &toplow, &tophigh);
388
389 /* Unsigned overflow is immediate. */
390 if (unsigned_p)
391 return (toplow | tophigh) != 0;
392
393 /* Check for signed overflow by calculating the signed representation of the
394 top half of the result; it should agree with the low half's sign bit. */
395 if (h1 < 0)
396 {
397 neg_double (l2, h2, &neglow, &neghigh);
398 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
399 }
400 if (h2 < 0)
401 {
402 neg_double (l1, h1, &neglow, &neghigh);
403 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
404 }
405 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
406}
407
408/* Shift the doubleword integer in L1, H1 left by COUNT places
409 keeping only PREC bits of result.
410 Shift right if COUNT is negative.
411 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
412 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
413
414void
415lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
416 HOST_WIDE_INT count, unsigned int prec,
417 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
418{
419 unsigned HOST_WIDE_INT signmask;
420
421 if (count < 0)
422 {
423 rshift_double (l1, h1, -count, prec, lv, hv, arith);
424 return;
425 }
426
427 if (SHIFT_COUNT_TRUNCATED)
428 count %= prec;
429
430 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
431 {
432 /* Shifting by the host word size is undefined according to the
433 ANSI standard, so we must handle this as a special case. */
434 *hv = 0;
435 *lv = 0;
436 }
437 else if (count >= HOST_BITS_PER_WIDE_INT)
438 {
439 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
440 *lv = 0;
441 }
442 else
443 {
444 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
445 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
446 *lv = l1 << count;
447 }
448
449 /* Sign extend all bits that are beyond the precision. */
450
451 signmask = -((prec > HOST_BITS_PER_WIDE_INT
452 ? ((unsigned HOST_WIDE_INT) *hv
453 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
454 : (*lv >> (prec - 1))) & 1);
455
456 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
457 ;
458 else if (prec >= HOST_BITS_PER_WIDE_INT)
459 {
460 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
461 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
462 }
463 else
464 {
465 *hv = signmask;
466 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
467 *lv |= signmask << prec;
468 }
469}
470
471/* Shift the doubleword integer in L1, H1 right by COUNT places
472 keeping only PREC bits of result. COUNT must be positive.
473 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
474 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
475
476void
477rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
478 HOST_WIDE_INT count, unsigned int prec,
479 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
480 int arith)
481{
482 unsigned HOST_WIDE_INT signmask;
483
484 signmask = (arith
485 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
486 : 0);
487
488 if (SHIFT_COUNT_TRUNCATED)
489 count %= prec;
490
491 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
492 {
493 /* Shifting by the host word size is undefined according to the
494 ANSI standard, so we must handle this as a special case. */
495 *hv = 0;
496 *lv = 0;
497 }
498 else if (count >= HOST_BITS_PER_WIDE_INT)
499 {
500 *hv = 0;
501 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
502 }
503 else
504 {
505 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
506 *lv = ((l1 >> count)
507 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
508 }
509
510 /* Zero / sign extend all bits that are beyond the precision. */
511
512 if (count >= (HOST_WIDE_INT)prec)
513 {
514 *hv = signmask;
515 *lv = signmask;
516 }
517 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
518 ;
519 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
520 {
521 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
522 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
523 }
524 else
525 {
526 *hv = signmask;
527 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
528 *lv |= signmask << (prec - count);
529 }
530}
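
/* Worked example (added): shifting -8 right by 2 with ARITH set
   drags sign bits in from the top (signmask == ~0) and yields -2;
   with ARITH clear the vacated bits are zero-filled and the result
   is a large positive value.  */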
531
532/* Rotate the doubleword integer in L1, H1 left by COUNT places
533 keeping only PREC bits of result.
534 Rotate right if COUNT is negative.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
536
537void
538lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
541{
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
544
545 count %= prec;
546 if (count < 0)
547 count += prec;
548
549 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551 *lv = s1l | s2l;
552 *hv = s1h | s2h;
553}
554
555/* Rotate the doubleword integer in L1, H1 right by COUNT places
556 keeping only PREC bits of result. COUNT must be positive.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558
559void
560rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
563{
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
566
567 count %= prec;
568 if (count < 0)
569 count += prec;
570
571 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 *lv = s1l | s2l;
574 *hv = s1h | s2h;
575}
576
577/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
578 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
579 CODE is a tree code for a kind of division, one of
580 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
581   or EXACT_DIV_EXPR.
582   It controls how the quotient is rounded to an integer.
583 Return nonzero if the operation overflows.
584 UNS nonzero says do unsigned division. */
585
586int
587div_and_round_double (enum tree_code code, int uns,
588 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
589 HOST_WIDE_INT hnum_orig,
590 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
591 HOST_WIDE_INT hden_orig,
592 unsigned HOST_WIDE_INT *lquo,
593 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
594 HOST_WIDE_INT *hrem)
595{
596 int quo_neg = 0;
597 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
598 HOST_WIDE_INT den[4], quo[4];
599 int i, j;
600 unsigned HOST_WIDE_INT work;
601 unsigned HOST_WIDE_INT carry = 0;
602 unsigned HOST_WIDE_INT lnum = lnum_orig;
603 HOST_WIDE_INT hnum = hnum_orig;
604 unsigned HOST_WIDE_INT lden = lden_orig;
605 HOST_WIDE_INT hden = hden_orig;
606 int overflow = 0;
607
608 if (hden == 0 && lden == 0)
609 overflow = 1, lden = 1;
610
611 /* Calculate quotient sign and convert operands to unsigned. */
612 if (!uns)
613 {
614 if (hnum < 0)
615 {
616 quo_neg = ~ quo_neg;
617 /* (minimum integer) / (-1) is the only overflow case. */
618 if (neg_double (lnum, hnum, &lnum, &hnum)
619 && ((HOST_WIDE_INT) lden & hden) == -1)
620 overflow = 1;
621 }
622 if (hden < 0)
623 {
624 quo_neg = ~ quo_neg;
625 neg_double (lden, hden, &lden, &hden);
626 }
627 }
628
629 if (hnum == 0 && hden == 0)
630 { /* single precision */
631 *hquo = *hrem = 0;
632 /* This unsigned division rounds toward zero. */
633 *lquo = lnum / lden;
634 goto finish_up;
635 }
636
637 if (hnum == 0)
638 { /* trivial case: dividend < divisor */
639 /* hden != 0 already checked. */
640 *hquo = *lquo = 0;
641 *hrem = hnum;
642 *lrem = lnum;
643 goto finish_up;
644 }
645
646 memset (quo, 0, sizeof quo);
647
648 memset (num, 0, sizeof num); /* to zero 9th element */
649 memset (den, 0, sizeof den);
650
651 encode (num, lnum, hnum);
652 encode (den, lden, hden);
653
654 /* Special code for when the divisor < BASE. */
655 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
656 {
657 /* hnum != 0 already checked. */
658 for (i = 4 - 1; i >= 0; i--)
659 {
660 work = num[i] + carry * BASE;
661 quo[i] = work / lden;
662 carry = work % lden;
663 }
664 }
665 else
666 {
667 /* Full double precision division,
668 with thanks to Don Knuth's "Seminumerical Algorithms". */
669 int num_hi_sig, den_hi_sig;
670 unsigned HOST_WIDE_INT quo_est, scale;
671
672 /* Find the highest nonzero divisor digit. */
673 for (i = 4 - 1;; i--)
674 if (den[i] != 0)
675 {
676 den_hi_sig = i;
677 break;
678 }
679
680	  /* Ensure that the first digit of the divisor is at least BASE/2.
681 This is required by the quotient digit estimation algorithm. */
682
683 scale = BASE / (den[den_hi_sig] + 1);
684 if (scale > 1)
685 { /* scale divisor and dividend */
686 carry = 0;
687 for (i = 0; i <= 4 - 1; i++)
688 {
689 work = (num[i] * scale) + carry;
690 num[i] = LOWPART (work);
691 carry = HIGHPART (work);
692 }
693
694 num[4] = carry;
695 carry = 0;
696 for (i = 0; i <= 4 - 1; i++)
697 {
698 work = (den[i] * scale) + carry;
699 den[i] = LOWPART (work);
700 carry = HIGHPART (work);
701 if (den[i] != 0) den_hi_sig = i;
702 }
703 }
704
705 num_hi_sig = 4;
706
707 /* Main loop */
708 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
709 {
710 /* Guess the next quotient digit, quo_est, by dividing the first
711 two remaining dividend digits by the high order quotient digit.
712 quo_est is never low and is at most 2 high. */
713 unsigned HOST_WIDE_INT tmp;
714
715 num_hi_sig = i + den_hi_sig + 1;
716 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
717 if (num[num_hi_sig] != den[den_hi_sig])
718 quo_est = work / den[den_hi_sig];
719 else
720 quo_est = BASE - 1;
721
722 /* Refine quo_est so it's usually correct, and at most one high. */
723 tmp = work - quo_est * den[den_hi_sig];
724 if (tmp < BASE
725 && (den[den_hi_sig - 1] * quo_est
726 > (tmp * BASE + num[num_hi_sig - 2])))
727 quo_est--;
728
729 /* Try QUO_EST as the quotient digit, by multiplying the
730 divisor by QUO_EST and subtracting from the remaining dividend.
731 Keep in mind that QUO_EST is the I - 1st digit. */
732
733 carry = 0;
734 for (j = 0; j <= den_hi_sig; j++)
735 {
736 work = quo_est * den[j] + carry;
737 carry = HIGHPART (work);
738 work = num[i + j] - LOWPART (work);
739 num[i + j] = LOWPART (work);
740 carry += HIGHPART (work) != 0;
741 }
742
743 /* If quo_est was high by one, then num[i] went negative and
744 we need to correct things. */
745 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
746 {
747 quo_est--;
748 carry = 0; /* add divisor back in */
749 for (j = 0; j <= den_hi_sig; j++)
750 {
751 work = num[i + j] + den[j] + carry;
752 carry = HIGHPART (work);
753 num[i + j] = LOWPART (work);
754 }
755
756 num [num_hi_sig] += carry;
757 }
758
759 /* Store the quotient digit. */
760 quo[i] = quo_est;
761 }
762 }
763
764 decode (quo, lquo, hquo);
765
766 finish_up:
767 /* If result is negative, make it so. */
768 if (quo_neg)
769 neg_double (*lquo, *hquo, lquo, hquo);
770
771 /* Compute trial remainder: rem = num - (quo * den) */
772 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
773 neg_double (*lrem, *hrem, lrem, hrem);
774 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
775
776 switch (code)
777 {
778 case TRUNC_DIV_EXPR:
779 case TRUNC_MOD_EXPR: /* round toward zero */
780 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
781 return overflow;
782
783 case FLOOR_DIV_EXPR:
784 case FLOOR_MOD_EXPR: /* round toward negative infinity */
785 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
786 {
787 /* quo = quo - 1; */
788 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
789 lquo, hquo);
790 }
791 else
792 return overflow;
793 break;
794
795 case CEIL_DIV_EXPR:
796 case CEIL_MOD_EXPR: /* round toward positive infinity */
797 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
798 {
799 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
800 lquo, hquo);
801 }
802 else
803 return overflow;
804 break;
805
806 case ROUND_DIV_EXPR:
807 case ROUND_MOD_EXPR: /* round to closest integer */
808 {
809 unsigned HOST_WIDE_INT labs_rem = *lrem;
810 HOST_WIDE_INT habs_rem = *hrem;
811 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
812 HOST_WIDE_INT habs_den = hden, htwice;
813
814 /* Get absolute values. */
815 if (*hrem < 0)
816 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
817 if (hden < 0)
818 neg_double (lden, hden, &labs_den, &habs_den);
819
820 /* If (2 * abs (lrem) >= abs (lden)) */
821 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
822 labs_rem, habs_rem, &ltwice, &htwice);
823
824 if (((unsigned HOST_WIDE_INT) habs_den
825 < (unsigned HOST_WIDE_INT) htwice)
826 || (((unsigned HOST_WIDE_INT) habs_den
827 == (unsigned HOST_WIDE_INT) htwice)
828 && (labs_den < ltwice)))
829 {
830 if (*hquo < 0)
831 /* quo = quo - 1; */
832 add_double (*lquo, *hquo,
833 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
834 else
835 /* quo = quo + 1; */
836 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
837 lquo, hquo);
838 }
839 else
840 return overflow;
841 }
842 break;
843
844 default:
845 gcc_unreachable ();
846 }
847
848 /* Compute true remainder: rem = num - (quo * den) */
849 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
850 neg_double (*lrem, *hrem, lrem, hrem);
851 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
852 return overflow;
853}
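
/* Worked example (added): dividing -7 by 2 gives a trial quotient of
   -3 and remainder -1.  TRUNC_DIV_EXPR keeps those; FLOOR_DIV_EXPR
   steps the quotient down to -4 (remainder +1); CEIL_DIV_EXPR leaves
   a negative quotient alone; ROUND_DIV_EXPR compares 2*|rem| with
   |den| (2 >= 2) and rounds away from zero, also giving -4.  */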
854
855/* If ARG2 divides ARG1 with zero remainder, carries out the division
856 of type CODE and returns the quotient.
857 Otherwise returns NULL_TREE. */
858
859static tree
860div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
861{
862 unsigned HOST_WIDE_INT int1l, int2l;
863 HOST_WIDE_INT int1h, int2h;
864 unsigned HOST_WIDE_INT quol, reml;
865 HOST_WIDE_INT quoh, remh;
866 tree type = TREE_TYPE (arg1);
867 int uns = TYPE_UNSIGNED (type);
868
869 int1l = TREE_INT_CST_LOW (arg1);
870 int1h = TREE_INT_CST_HIGH (arg1);
871 int2l = TREE_INT_CST_LOW (arg2);
872 int2h = TREE_INT_CST_HIGH (arg2);
873
874 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
875 &quol, &quoh, &reml, &remh);
876 if (remh != 0 || reml != 0)
877 return NULL_TREE;
878
879 return build_int_cst_wide (type, quol, quoh);
880}
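
/* Example (added): 12 / 4 divides exactly and yields the constant 3,
   while 13 / 4 leaves remainder 1, so NULL_TREE is returned and the
   caller keeps the original expression.  */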
881
882/* This is non-zero if we should defer warnings about undefined
883 overflow. This facility exists because these warnings are a
884 special case. The code to estimate loop iterations does not want
885 to issue any warnings, since it works with expressions which do not
886 occur in user code. Various bits of cleanup code call fold(), but
887 only use the result if it has certain characteristics (e.g., is a
888 constant); that code only wants to issue a warning if the result is
889 used. */
890
891static int fold_deferring_overflow_warnings;
892
893/* If a warning about undefined overflow is deferred, this is the
894 warning. Note that this may cause us to turn two warnings into
895 one, but that is fine since it is sufficient to only give one
896 warning per expression. */
897
898static const char* fold_deferred_overflow_warning;
899
900/* If a warning about undefined overflow is deferred, this is the
901 level at which the warning should be emitted. */
902
903static enum warn_strict_overflow_code fold_deferred_overflow_code;
904
905/* Start deferring overflow warnings. We could use a stack here to
906 permit nested calls, but at present it is not necessary. */
907
908void
909fold_defer_overflow_warnings (void)
910{
911 ++fold_deferring_overflow_warnings;
912}
913
914/* Stop deferring overflow warnings. If there is a pending warning,
915 and ISSUE is true, then issue the warning if appropriate. STMT is
916 the statement with which the warning should be associated (used for
917 location information); STMT may be NULL. CODE is the level of the
918 warning--a warn_strict_overflow_code value. This function will use
919 the smaller of CODE and the deferred code when deciding whether to
920 issue the warning. CODE may be zero to mean to always use the
921 deferred code. */
922
923void
924fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
925{
926 const char *warnmsg;
927 location_t locus;
928
929 gcc_assert (fold_deferring_overflow_warnings > 0);
930 --fold_deferring_overflow_warnings;
931 if (fold_deferring_overflow_warnings > 0)
932 {
933 if (fold_deferred_overflow_warning != NULL
934 && code != 0
935 && code < (int) fold_deferred_overflow_code)
936 fold_deferred_overflow_code = code;
937 return;
938 }
939
940 warnmsg = fold_deferred_overflow_warning;
941 fold_deferred_overflow_warning = NULL;
942
943 if (!issue || warnmsg == NULL)
944 return;
945
946 /* Use the smallest code level when deciding to issue the
947 warning. */
948 if (code == 0 || code > (int) fold_deferred_overflow_code)
949 code = fold_deferred_overflow_code;
950
951 if (!issue_strict_overflow_warning (code))
952 return;
953
954 if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
955 locus = input_location;
956 else
957 locus = EXPR_LOCATION (stmt);
958 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
959}
960
961/* Stop deferring overflow warnings, ignoring any deferred
962 warnings. */
963
964void
965fold_undefer_and_ignore_overflow_warnings (void)
966{
967 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
968}
969
970/* Whether we are deferring overflow warnings. */
971
972bool
973fold_deferring_overflow_warnings_p (void)
974{
975 return fold_deferring_overflow_warnings > 0;
976}
977
978/* This is called when we fold something based on the fact that signed
979 overflow is undefined. */
980
981static void
982fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
983{
984 gcc_assert (!flag_wrapv && !flag_trapv);
985 if (fold_deferring_overflow_warnings > 0)
986 {
987 if (fold_deferred_overflow_warning == NULL
988 || wc < fold_deferred_overflow_code)
989 {
990 fold_deferred_overflow_warning = gmsgid;
991 fold_deferred_overflow_code = wc;
992 }
993 }
994 else if (issue_strict_overflow_warning (wc))
995 warning (OPT_Wstrict_overflow, gmsgid);
996}
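
/* Added sketch (hypothetical helper, not in the original file),
   showing the intended use of the deferral machinery: a caller that
   folds speculatively defers warnings and only releases them when
   the folded result is actually used.  */

static tree ATTRIBUTE_UNUSED
fold_speculatively (tree expr)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  /* Issue any deferred warning only if the fold changed something
     we are going to use; otherwise drop it silently.  */
  fold_undefer_overflow_warnings (folded != expr, NULL_TREE, 0);
  return folded;
}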
997
998/* Return true if the built-in mathematical function specified by CODE
999 is odd, i.e. -f(x) == f(-x). */
1000
1001static bool
1002negate_mathfn_p (enum built_in_function code)
1003{
1004 switch (code)
1005 {
1006 CASE_FLT_FN (BUILT_IN_ASIN):
1007 CASE_FLT_FN (BUILT_IN_ASINH):
1008 CASE_FLT_FN (BUILT_IN_ATAN):
1009 CASE_FLT_FN (BUILT_IN_ATANH):
1010 CASE_FLT_FN (BUILT_IN_CBRT):
1011 CASE_FLT_FN (BUILT_IN_SIN):
1012 CASE_FLT_FN (BUILT_IN_SINH):
1013 CASE_FLT_FN (BUILT_IN_TAN):
1014 CASE_FLT_FN (BUILT_IN_TANH):
1015 return true;
1016
1017 default:
1018 break;
1019 }
1020 return false;
1021}
1022
1023/* Check whether we may negate an integer constant T without causing
1024 overflow. */
1025
1026bool
1027may_negate_without_overflow_p (tree t)
1028{
1029 unsigned HOST_WIDE_INT val;
1030 unsigned int prec;
1031 tree type;
1032
1033 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1034
1035 type = TREE_TYPE (t);
1036 if (TYPE_UNSIGNED (type))
1037 return false;
1038
1039 prec = TYPE_PRECISION (type);
1040 if (prec > HOST_BITS_PER_WIDE_INT)
1041 {
1042 if (TREE_INT_CST_LOW (t) != 0)
1043 return true;
1044 prec -= HOST_BITS_PER_WIDE_INT;
1045 val = TREE_INT_CST_HIGH (t);
1046 }
1047 else
1048 val = TREE_INT_CST_LOW (t);
1049 if (prec < HOST_BITS_PER_WIDE_INT)
1050 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1051 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1052}
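
/* For instance, in a 32-bit signed type the only INTEGER_CST this
   rejects is the minimum value (bit pattern 1 << 31): negating
   INT_MIN wraps back to INT_MIN, while every other value, including
   INT_MAX, negates without overflow.  */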
1053
1054/* Determine whether an expression T can be cheaply negated using
1055 the function negate_expr without introducing undefined overflow. */
1056
1057static bool
1058negate_expr_p (tree t)
1059{
1060 tree type;
1061
1062 if (t == 0)
1063 return false;
1064
1065 type = TREE_TYPE (t);
1066
1067 STRIP_SIGN_NOPS (t);
1068 switch (TREE_CODE (t))
1069 {
1070 case INTEGER_CST:
1071 if (TYPE_OVERFLOW_WRAPS (type))
1072 return true;
1073
1074 /* Check that -CST will not overflow type. */
1075 return may_negate_without_overflow_p (t);
1076 case BIT_NOT_EXPR:
1077 return (INTEGRAL_TYPE_P (type)
1078 && TYPE_OVERFLOW_WRAPS (type));
1079
1080 case REAL_CST:
1081 case NEGATE_EXPR:
1082 return true;
1083
1084 case COMPLEX_CST:
1085 return negate_expr_p (TREE_REALPART (t))
1086 && negate_expr_p (TREE_IMAGPART (t));
1087
1088 case PLUS_EXPR:
1089 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
1090 return false;
1091 /* -(A + B) -> (-B) - A. */
1092 if (negate_expr_p (TREE_OPERAND (t, 1))
1093 && reorder_operands_p (TREE_OPERAND (t, 0),
1094 TREE_OPERAND (t, 1)))
1095 return true;
1096 /* -(A + B) -> (-A) - B. */
1097 return negate_expr_p (TREE_OPERAND (t, 0));
1098
1099 case MINUS_EXPR:
1100 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1101 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1102 && reorder_operands_p (TREE_OPERAND (t, 0),
1103 TREE_OPERAND (t, 1));
1104
1105 case MULT_EXPR:
1106 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1107 break;
1108
1109 /* Fall through. */
1110
1111 case RDIV_EXPR:
1112 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1113 return negate_expr_p (TREE_OPERAND (t, 1))
1114 || negate_expr_p (TREE_OPERAND (t, 0));
1115 break;
1116
1117 case TRUNC_DIV_EXPR:
1118 case ROUND_DIV_EXPR:
1119 case FLOOR_DIV_EXPR:
1120 case CEIL_DIV_EXPR:
1121 case EXACT_DIV_EXPR:
1122 /* In general we can't negate A / B, because if A is INT_MIN and
1123 B is 1, we may turn this into INT_MIN / -1 which is undefined
1124 and actually traps on some architectures. But if overflow is
1125 undefined, we can negate, because - (INT_MIN / 1) is an
1126 overflow. */
1127 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1128 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1129 break;
1130 return negate_expr_p (TREE_OPERAND (t, 1))
1131 || negate_expr_p (TREE_OPERAND (t, 0));
1132
1133 case NOP_EXPR:
1134 /* Negate -((double)float) as (double)(-float). */
1135 if (TREE_CODE (type) == REAL_TYPE)
1136 {
1137 tree tem = strip_float_extensions (t);
1138 if (tem != t)
1139 return negate_expr_p (tem);
1140 }
1141 break;
1142
1143 case CALL_EXPR:
1144 /* Negate -f(x) as f(-x). */
1145 if (negate_mathfn_p (builtin_mathfn_code (t)))
1146 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1147 break;
1148
1149 case RSHIFT_EXPR:
1150 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1151 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1152 {
1153 tree op1 = TREE_OPERAND (t, 1);
1154 if (TREE_INT_CST_HIGH (op1) == 0
1155 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1156 == TREE_INT_CST_LOW (op1))
1157 return true;
1158 }
1159 break;
1160
1161 default:
1162 break;
1163 }
1164 return false;
1165}
1166
1167/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
1168   simplification is possible.
1169 If negate_expr_p would return true for T, NULL_TREE will never be
1170 returned. */
1171
1172static tree
1173fold_negate_expr (tree t)
1174{
1175 tree type = TREE_TYPE (t);
1176 tree tem;
1177
1178 switch (TREE_CODE (t))
1179 {
1180 /* Convert - (~A) to A + 1. */
1181 case BIT_NOT_EXPR:
1182 if (INTEGRAL_TYPE_P (type))
1183 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1184 build_int_cst (type, 1));
1185 break;
1186
1187 case INTEGER_CST:
1188 tem = fold_negate_const (t, type);
1189 if (!TREE_OVERFLOW (tem)
1190 || !TYPE_OVERFLOW_TRAPS (type))
1191 return tem;
1192 break;
1193
1194 case REAL_CST:
1195 tem = fold_negate_const (t, type);
1196 /* Two's complement FP formats, such as c4x, may overflow. */
1197 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1198 return tem;
1199 break;
1200
1201 case COMPLEX_CST:
1202 {
1203 tree rpart = negate_expr (TREE_REALPART (t));
1204 tree ipart = negate_expr (TREE_IMAGPART (t));
1205
1206 if ((TREE_CODE (rpart) == REAL_CST
1207 && TREE_CODE (ipart) == REAL_CST)
1208 || (TREE_CODE (rpart) == INTEGER_CST
1209 && TREE_CODE (ipart) == INTEGER_CST))
1210 return build_complex (type, rpart, ipart);
1211 }
1212 break;
1213
1214 case NEGATE_EXPR:
1215 return TREE_OPERAND (t, 0);
1216
1217 case PLUS_EXPR:
1218 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1219 {
1220 /* -(A + B) -> (-B) - A. */
1221 if (negate_expr_p (TREE_OPERAND (t, 1))
1222 && reorder_operands_p (TREE_OPERAND (t, 0),
1223 TREE_OPERAND (t, 1)))
1224 {
1225 tem = negate_expr (TREE_OPERAND (t, 1));
1226 return fold_build2 (MINUS_EXPR, type,
1227 tem, TREE_OPERAND (t, 0));
1228 }
1229
1230 /* -(A + B) -> (-A) - B. */
1231 if (negate_expr_p (TREE_OPERAND (t, 0)))
1232 {
1233 tem = negate_expr (TREE_OPERAND (t, 0));
1234 return fold_build2 (MINUS_EXPR, type,
1235 tem, TREE_OPERAND (t, 1));
1236 }
1237 }
1238 break;
1239
1240 case MINUS_EXPR:
1241 /* - (A - B) -> B - A */
1242 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1243 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1244 return fold_build2 (MINUS_EXPR, type,
1245 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1246 break;
1247
1248 case MULT_EXPR:
1249 if (TYPE_UNSIGNED (type))
1250 break;
1251
1252 /* Fall through. */
1253
1254 case RDIV_EXPR:
1255 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1256 {
1257 tem = TREE_OPERAND (t, 1);
1258 if (negate_expr_p (tem))
1259 return fold_build2 (TREE_CODE (t), type,
1260 TREE_OPERAND (t, 0), negate_expr (tem));
1261 tem = TREE_OPERAND (t, 0);
1262 if (negate_expr_p (tem))
1263 return fold_build2 (TREE_CODE (t), type,
1264 negate_expr (tem), TREE_OPERAND (t, 1));
1265 }
1266 break;
1267
1268 case TRUNC_DIV_EXPR:
1269 case ROUND_DIV_EXPR:
1270 case FLOOR_DIV_EXPR:
1271 case CEIL_DIV_EXPR:
1272 case EXACT_DIV_EXPR:
1273 /* In general we can't negate A / B, because if A is INT_MIN and
1274 B is 1, we may turn this into INT_MIN / -1 which is undefined
1275 and actually traps on some architectures. But if overflow is
1276 undefined, we can negate, because - (INT_MIN / 1) is an
1277 overflow. */
1278 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1279 {
1280 const char * const warnmsg = G_("assuming signed overflow does not "
1281 "occur when negating a division");
1282 tem = TREE_OPERAND (t, 1);
1283 if (negate_expr_p (tem))
1284 {
1285 if (INTEGRAL_TYPE_P (type)
1286 && (TREE_CODE (tem) != INTEGER_CST
1287 || integer_onep (tem)))
1288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1289 return fold_build2 (TREE_CODE (t), type,
1290 TREE_OPERAND (t, 0), negate_expr (tem));
1291 }
1292 tem = TREE_OPERAND (t, 0);
1293 if (negate_expr_p (tem))
1294 {
1295 if (INTEGRAL_TYPE_P (type)
1296 && (TREE_CODE (tem) != INTEGER_CST
1297 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1298 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1299 return fold_build2 (TREE_CODE (t), type,
1300 negate_expr (tem), TREE_OPERAND (t, 1));
1301 }
1302 }
1303 break;
1304
1305 case NOP_EXPR:
1306 /* Convert -((double)float) into (double)(-float). */
1307 if (TREE_CODE (type) == REAL_TYPE)
1308 {
1309 tem = strip_float_extensions (t);
1310 if (tem != t && negate_expr_p (tem))
1311 return negate_expr (tem);
1312 }
1313 break;
1314
1315 case CALL_EXPR:
1316 /* Negate -f(x) as f(-x). */
1317 if (negate_mathfn_p (builtin_mathfn_code (t))
1318 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1319 {
1320 tree fndecl, arg, arglist;
1321
1322 fndecl = get_callee_fndecl (t);
1323 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1324 arglist = build_tree_list (NULL_TREE, arg);
1325 return build_function_call_expr (fndecl, arglist);
1326 }
1327 break;
1328
1329 case RSHIFT_EXPR:
1330 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1331 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1332 {
1333 tree op1 = TREE_OPERAND (t, 1);
1334 if (TREE_INT_CST_HIGH (op1) == 0
1335 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1336 == TREE_INT_CST_LOW (op1))
1337 {
1338 tree ntype = TYPE_UNSIGNED (type)
1339 ? lang_hooks.types.signed_type (type)
1340 : lang_hooks.types.unsigned_type (type);
1341 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1342 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1343 return fold_convert (type, temp);
1344 }
1345 }
1346 break;
1347
1348 default:
1349 break;
1350 }
1351
1352 return NULL_TREE;
1353}
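
/* Worked examples of the cases above (illustrative): -(~a) folds to
   a + 1 for integral types; -(a - b) folds to b - a when the type is
   integral or -funsafe-math-optimizations permits it; and for 32-bit
   int, -((int) x >> 31) becomes (int) ((unsigned) x >> 31), turning
   the 0 / -1 result of the arithmetic shift into 0 / 1.  */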
1354
1355/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1356   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
1357   case return NULL_TREE.  */
1358
1359static tree
1360negate_expr (tree t)
1361{
1362 tree type, tem;
1363
1364 if (t == NULL_TREE)
1365 return NULL_TREE;
1366
1367 type = TREE_TYPE (t);
1368 STRIP_SIGN_NOPS (t);
1369
1370 tem = fold_negate_expr (t);
1371 if (!tem)
1372 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1373 return fold_convert (type, tem);
1374}
1375
1376/* Split a tree IN into constant, literal and variable parts that could be
1377 combined with CODE to make IN. "constant" means an expression with
1378 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1379 commutative arithmetic operation. Store the constant part into *CONP,
1380 the literal in *LITP and return the variable part. If a part isn't
1381 present, set it to null. If the tree does not decompose in this way,
1382 return the entire tree as the variable part and the other parts as null.
1383
1384 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1385   case, we negate an operand that was subtracted, except for a
1386   literal, for which we use *MINUS_LITP instead.
1387
1388 If NEGATE_P is true, we are negating all of IN, again except a literal
1389 for which we use *MINUS_LITP instead.
1390
1391 If IN is itself a literal or constant, return it as appropriate.
1392
1393 Note that we do not guarantee that any of the three values will be the
1394 same type as IN, but they will have the same signedness and mode. */
1395
1396static tree
1397split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1398 tree *minus_litp, int negate_p)
1399{
1400 tree var = 0;
1401
1402 *conp = 0;
1403 *litp = 0;
1404 *minus_litp = 0;
1405
1406 /* Strip any conversions that don't change the machine mode or signedness. */
1407 STRIP_SIGN_NOPS (in);
1408
1409 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1410 *litp = in;
1411 else if (TREE_CODE (in) == code
1412 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1413 /* We can associate addition and subtraction together (even
1414 though the C standard doesn't say so) for integers because
1415 the value is not affected. For reals, the value might be
1416 affected, so we can't. */
1417 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1418 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1419 {
1420 tree op0 = TREE_OPERAND (in, 0);
1421 tree op1 = TREE_OPERAND (in, 1);
1422 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1423 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1424
1425 /* First see if either of the operands is a literal, then a constant. */
1426 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1427 *litp = op0, op0 = 0;
1428 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1429 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1430
1431 if (op0 != 0 && TREE_CONSTANT (op0))
1432 *conp = op0, op0 = 0;
1433 else if (op1 != 0 && TREE_CONSTANT (op1))
1434 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1435
1436 /* If we haven't dealt with either operand, this is not a case we can
1437 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1438 if (op0 != 0 && op1 != 0)
1439 var = in;
1440 else if (op0 != 0)
1441 var = op0;
1442 else
1443 var = op1, neg_var_p = neg1_p;
1444
1445 /* Now do any needed negations. */
1446 if (neg_litp_p)
1447 *minus_litp = *litp, *litp = 0;
1448 if (neg_conp_p)
1449 *conp = negate_expr (*conp);
1450 if (neg_var_p)
1451 var = negate_expr (var);
1452 }
1453 else if (TREE_CONSTANT (in))
1454 *conp = in;
1455 else
1456 var = in;
1457
1458 if (negate_p)
1459 {
1460 if (*litp)
1461 *minus_litp = *litp, *litp = 0;
1462 else if (*minus_litp)
1463 *litp = *minus_litp, *minus_litp = 0;
1464 *conp = negate_expr (*conp);
1465 var = negate_expr (var);
1466 }
1467
1468 return var;
1469}
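
/* Example: splitting IN = a - 5 with CODE == PLUS_EXPR returns VAR == a
   and sets *MINUS_LITP to 5 (null *LITP and *CONP), so the pieces
   recombine as a + (-5).  For IN = a + b where b is TREE_CONSTANT but
   not a literal, VAR == a and *CONP == b.  */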
1470
1471/* Re-associate trees split by the above function. T1 and T2 are either
1472 expressions to associate or null. Return the new expression, if any. If
1473 we build an operation, do it in TYPE and with CODE. */
1474
1475static tree
1476associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1477{
1478 if (t1 == 0)
1479 return t2;
1480 else if (t2 == 0)
1481 return t1;
1482
1483 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1484 try to fold this since we will have infinite recursion. But do
1485 deal with any NEGATE_EXPRs. */
1486 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1487 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1488 {
1489 if (code == PLUS_EXPR)
1490 {
1491 if (TREE_CODE (t1) == NEGATE_EXPR)
1492 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1493 fold_convert (type, TREE_OPERAND (t1, 0)));
1494 else if (TREE_CODE (t2) == NEGATE_EXPR)
1495 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1496 fold_convert (type, TREE_OPERAND (t2, 0)));
1497 else if (integer_zerop (t2))
1498 return fold_convert (type, t1);
1499 }
1500 else if (code == MINUS_EXPR)
1501 {
1502 if (integer_zerop (t2))
1503 return fold_convert (type, t1);
1504 }
1505
1506 return build2 (code, type, fold_convert (type, t1),
1507 fold_convert (type, t2));
1508 }
1509
1510 return fold_build2 (code, type, fold_convert (type, t1),
1511 fold_convert (type, t2));
1512}
1513
1514/* Combine two integer constants ARG1 and ARG2 under operation CODE
1515 to produce a new constant. Return NULL_TREE if we don't know how
1516 to evaluate CODE at compile-time.
1517
1518 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1519
1520tree
1521int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1522{
1523 unsigned HOST_WIDE_INT int1l, int2l;
1524 HOST_WIDE_INT int1h, int2h;
1525 unsigned HOST_WIDE_INT low;
1526 HOST_WIDE_INT hi;
1527 unsigned HOST_WIDE_INT garbagel;
1528 HOST_WIDE_INT garbageh;
1529 tree t;
1530 tree type = TREE_TYPE (arg1);
1531 int uns = TYPE_UNSIGNED (type);
1532 int is_sizetype
1533 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1534 int overflow = 0;
1535
1536 int1l = TREE_INT_CST_LOW (arg1);
1537 int1h = TREE_INT_CST_HIGH (arg1);
1538 int2l = TREE_INT_CST_LOW (arg2);
1539 int2h = TREE_INT_CST_HIGH (arg2);
1540
1541 switch (code)
1542 {
1543 case BIT_IOR_EXPR:
1544 low = int1l | int2l, hi = int1h | int2h;
1545 break;
1546
1547 case BIT_XOR_EXPR:
1548 low = int1l ^ int2l, hi = int1h ^ int2h;
1549 break;
1550
1551 case BIT_AND_EXPR:
1552 low = int1l & int2l, hi = int1h & int2h;
1553 break;
1554
1555 case RSHIFT_EXPR:
1556 int2l = -int2l;
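      /* ... fall through ... */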
1557 case LSHIFT_EXPR:
1558 /* It's unclear from the C standard whether shifts can overflow.
1559 The following code ignores overflow; perhaps a C standard
1560 interpretation ruling is needed. */
1561 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1562 &low, &hi, !uns);
1563 break;
1564
1565 case RROTATE_EXPR:
1566      int2l = -int2l;
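      /* ... fall through ... */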
1567 case LROTATE_EXPR:
1568 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1569 &low, &hi);
1570 break;
1571
1572 case PLUS_EXPR:
1573 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1574 break;
1575
1576 case MINUS_EXPR:
1577 neg_double (int2l, int2h, &low, &hi);
1578 add_double (int1l, int1h, low, hi, &low, &hi);
1579 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1580 break;
1581
1582 case MULT_EXPR:
1583 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1584 break;
1585
1586 case TRUNC_DIV_EXPR:
1587 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1588 case EXACT_DIV_EXPR:
1589 /* This is a shortcut for a common special case. */
1590 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1591 && ! TREE_CONSTANT_OVERFLOW (arg1)
1592 && ! TREE_CONSTANT_OVERFLOW (arg2)
1593 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1594 {
1595 if (code == CEIL_DIV_EXPR)
1596 int1l += int2l - 1;
1597
1598 low = int1l / int2l, hi = 0;
1599 break;
1600 }
1601
1602 /* ... fall through ... */
1603
1604 case ROUND_DIV_EXPR:
1605 if (int2h == 0 && int2l == 0)
1606 return NULL_TREE;
1607 if (int2h == 0 && int2l == 1)
1608 {
1609 low = int1l, hi = int1h;
1610 break;
1611 }
1612 if (int1l == int2l && int1h == int2h
1613 && ! (int1l == 0 && int1h == 0))
1614 {
1615 low = 1, hi = 0;
1616 break;
1617 }
1618 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1619 &low, &hi, &garbagel, &garbageh);
1620 break;
1621
1622 case TRUNC_MOD_EXPR:
1623 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1624 /* This is a shortcut for a common special case. */
1625 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1626 && ! TREE_CONSTANT_OVERFLOW (arg1)
1627 && ! TREE_CONSTANT_OVERFLOW (arg2)
1628 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1629 {
1630 if (code == CEIL_MOD_EXPR)
1631 int1l += int2l - 1;
1632 low = int1l % int2l, hi = 0;
1633 break;
1634 }
1635
1636 /* ... fall through ... */
1637
1638 case ROUND_MOD_EXPR:
1639 if (int2h == 0 && int2l == 0)
1640 return NULL_TREE;
1641 overflow = div_and_round_double (code, uns,
1642 int1l, int1h, int2l, int2h,
1643 &garbagel, &garbageh, &low, &hi);
1644 break;
1645
1646 case MIN_EXPR:
1647 case MAX_EXPR:
1648 if (uns)
1649 low = (((unsigned HOST_WIDE_INT) int1h
1650 < (unsigned HOST_WIDE_INT) int2h)
1651 || (((unsigned HOST_WIDE_INT) int1h
1652 == (unsigned HOST_WIDE_INT) int2h)
1653 && int1l < int2l));
1654 else
1655 low = (int1h < int2h
1656 || (int1h == int2h && int1l < int2l));
1657
1658 if (low == (code == MIN_EXPR))
1659 low = int1l, hi = int1h;
1660 else
1661 low = int2l, hi = int2h;
1662 break;
1663
1664 default:
1665 return NULL_TREE;
1666 }
1667
1668 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1669
1670 if (notrunc)
1671 {
1672 /* Propagate overflow flags ourselves. */
1673 if (((!uns || is_sizetype) && overflow)
1674 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1675 {
1676 t = copy_node (t);
1677 TREE_OVERFLOW (t) = 1;
1678 TREE_CONSTANT_OVERFLOW (t) = 1;
1679 }
1680 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1681 {
1682 t = copy_node (t);
1683 TREE_CONSTANT_OVERFLOW (t) = 1;
1684 }
1685 }
1686 else
1687 t = force_fit_type (t, 1,
1688 ((!uns || is_sizetype) && overflow)
1689 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1690 TREE_CONSTANT_OVERFLOW (arg1)
1691 | TREE_CONSTANT_OVERFLOW (arg2));
1692
1693 return t;
1694}
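
/* Usage sketch (hypothetical operands): with ARG1 and ARG2 the int
   constants 7 and 35, int_const_binop (PLUS_EXPR, arg1, arg2, 0)
   yields the INTEGER_CST 42.  Folding INT_MAX + 1 in a signed type
   instead returns a constant carrying TREE_OVERFLOW, set by
   force_fit_type since NOTRUNC is zero.  */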
1695
1696/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1697 constant. We assume ARG1 and ARG2 have the same data type, or at least
1698 are the same kind of constant and the same machine mode. Return zero if
1699 combining the constants is not allowed in the current operating mode.
1700
1701 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1702
1703static tree
1704const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1705{
1706 /* Sanity check for the recursive cases. */
1707 if (!arg1 || !arg2)
1708 return NULL_TREE;
1709
1710 STRIP_NOPS (arg1);
1711 STRIP_NOPS (arg2);
1712
1713 if (TREE_CODE (arg1) == INTEGER_CST)
1714 return int_const_binop (code, arg1, arg2, notrunc);
1715
1716 if (TREE_CODE (arg1) == REAL_CST)
1717 {
1718 enum machine_mode mode;
1719 REAL_VALUE_TYPE d1;
1720 REAL_VALUE_TYPE d2;
1721 REAL_VALUE_TYPE value;
1722 REAL_VALUE_TYPE result;
1723 bool inexact;
1724 tree t, type;
1725
1726 /* The following codes are handled by real_arithmetic. */
1727 switch (code)
1728 {
1729 case PLUS_EXPR:
1730 case MINUS_EXPR:
1731 case MULT_EXPR:
1732 case RDIV_EXPR:
1733 case MIN_EXPR:
1734 case MAX_EXPR:
1735 break;
1736
1737 default:
1738 return NULL_TREE;
1739 }
1740
1741 d1 = TREE_REAL_CST (arg1);
1742 d2 = TREE_REAL_CST (arg2);
1743
1744 type = TREE_TYPE (arg1);
1745 mode = TYPE_MODE (type);
1746
1747      /* Don't perform the operation if we honor signaling NaNs and
1748 either operand is a NaN. */
1749 if (HONOR_SNANS (mode)
1750 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1751 return NULL_TREE;
1752
1753      /* Don't perform the operation if it would raise a division
1754 by zero exception. */
1755 if (code == RDIV_EXPR
1756 && REAL_VALUES_EQUAL (d2, dconst0)
1757 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1758 return NULL_TREE;
1759
1760 /* If either operand is a NaN, just return it. Otherwise, set up
1761	 for a floating-point trap; we return an overflow.  */
1762 if (REAL_VALUE_ISNAN (d1))
1763 return arg1;
1764 else if (REAL_VALUE_ISNAN (d2))
1765 return arg2;
1766
1767 inexact = real_arithmetic (&value, code, &d1, &d2);
1768 real_convert (&result, mode, &value);
1769
1770 /* Don't constant fold this floating point operation if
1771	 the result has overflowed and flag_trapping_math is set.  */
1772 if (flag_trapping_math
1773 && MODE_HAS_INFINITIES (mode)
1774 && REAL_VALUE_ISINF (result)
1775 && !REAL_VALUE_ISINF (d1)
1776 && !REAL_VALUE_ISINF (d2))
1777 return NULL_TREE;
1778
1779 /* Don't constant fold this floating point operation if the
1780	 result may depend upon the run-time rounding mode and
1781 flag_rounding_math is set, or if GCC's software emulation
1782 is unable to accurately represent the result. */
1783 if ((flag_rounding_math
1784 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1785 && !flag_unsafe_math_optimizations))
1786 && (inexact || !real_identical (&result, &value)))
1787 return NULL_TREE;
1788
1789 t = build_real (type, result);
1790
1791 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1792 TREE_CONSTANT_OVERFLOW (t)
1793 = TREE_OVERFLOW (t)
1794 | TREE_CONSTANT_OVERFLOW (arg1)
1795 | TREE_CONSTANT_OVERFLOW (arg2);
1796 return t;
1797 }
1798
1799 if (TREE_CODE (arg1) == COMPLEX_CST)
1800 {
1801 tree type = TREE_TYPE (arg1);
1802 tree r1 = TREE_REALPART (arg1);
1803 tree i1 = TREE_IMAGPART (arg1);
1804 tree r2 = TREE_REALPART (arg2);
1805 tree i2 = TREE_IMAGPART (arg2);
1806 tree real, imag;
1807
1808 switch (code)
1809 {
1810 case PLUS_EXPR:
1811 case MINUS_EXPR:
1812 real = const_binop (code, r1, r2, notrunc);
1813 imag = const_binop (code, i1, i2, notrunc);
1814 break;
1815
1816 case MULT_EXPR:
1817 real = const_binop (MINUS_EXPR,
1818 const_binop (MULT_EXPR, r1, r2, notrunc),
1819 const_binop (MULT_EXPR, i1, i2, notrunc),
1820 notrunc);
1821 imag = const_binop (PLUS_EXPR,
1822 const_binop (MULT_EXPR, r1, i2, notrunc),
1823 const_binop (MULT_EXPR, i1, r2, notrunc),
1824 notrunc);
1825 break;
1826
1827 case RDIV_EXPR:
1828 {
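	  /* Complex division below uses the standard identity
	     (r1 + i1*i) / (r2 + i2*i)
	       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2);
	     T1 and T2 are the real and imaginary numerators and
	     MAGSQUARED is the shared denominator.  */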
1829 tree magsquared
1830 = const_binop (PLUS_EXPR,
1831 const_binop (MULT_EXPR, r2, r2, notrunc),
1832 const_binop (MULT_EXPR, i2, i2, notrunc),
1833 notrunc);
1834 tree t1
1835 = const_binop (PLUS_EXPR,
1836 const_binop (MULT_EXPR, r1, r2, notrunc),
1837 const_binop (MULT_EXPR, i1, i2, notrunc),
1838 notrunc);
1839 tree t2
1840 = const_binop (MINUS_EXPR,
1841 const_binop (MULT_EXPR, i1, r2, notrunc),
1842 const_binop (MULT_EXPR, r1, i2, notrunc),
1843 notrunc);
1844
1845 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1846 code = TRUNC_DIV_EXPR;
1847
1848 real = const_binop (code, t1, magsquared, notrunc);
1849 imag = const_binop (code, t2, magsquared, notrunc);
1850 }
1851 break;
1852
1853 default:
1854 return NULL_TREE;
1855 }
1856
1857 if (real && imag)
1858 return build_complex (type, real, imag);
1859 }
1860
1861 return NULL_TREE;
1862}
1863
1864/* Create a size type INT_CST node with NUMBER sign extended. KIND
1865 indicates which particular sizetype to create. */
1866
1867tree
1868size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869{
1870 return build_int_cst (sizetype_tab[(int) kind], number);
1871}
1872
1873/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1874   is a tree code.  The type of the result is taken from the operands.
1875   Both must be the same integer type, and it must be a sizetype.
1876 If the operands are constant, so is the result. */
1877
1878tree
1879size_binop (enum tree_code code, tree arg0, tree arg1)
1880{
1881 tree type = TREE_TYPE (arg0);
1882
1883 if (arg0 == error_mark_node || arg1 == error_mark_node)
1884 return error_mark_node;
1885
1886 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887 && type == TREE_TYPE (arg1));
1888
1889 /* Handle the special case of two integer constants faster. */
1890 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891 {
1892 /* And some specific cases even faster than that. */
1893 if (code == PLUS_EXPR && integer_zerop (arg0))
1894 return arg1;
1895 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896 && integer_zerop (arg1))
1897 return arg0;
1898 else if (code == MULT_EXPR && integer_onep (arg0))
1899 return arg1;
1900
1901 /* Handle general case of two integer constants. */
1902 return int_const_binop (code, arg0, arg1, 0);
1903 }
1904
1905 return fold_build2 (code, type, arg0, arg1);
1906}
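
/* Usage sketch: size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (type), size_int (4))
   adds four bytes to a type's size, folding to an INTEGER_CST when
   TYPE_SIZE_UNIT (type) is constant; both operands are already
   sizetype values, as the assertion above requires.  */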
1907
1908/* Given two values, either both of sizetype or both of bitsizetype,
1909 compute the difference between the two values. Return the value
1910   in the signed type corresponding to the type of the operands.  */
1911
1912tree
1913size_diffop (tree arg0, tree arg1)
1914{
1915 tree type = TREE_TYPE (arg0);
1916 tree ctype;
1917
1918 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919 && type == TREE_TYPE (arg1));
1920
1921 /* If the type is already signed, just do the simple thing. */
1922 if (!TYPE_UNSIGNED (type))
1923 return size_binop (MINUS_EXPR, arg0, arg1);
1924
1925 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926
1927 /* If either operand is not a constant, do the conversions to the signed
1928 type and subtract. The hardware will do the right thing with any
1929 overflow in the subtraction. */
1930 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932 fold_convert (ctype, arg1));
1933
1934 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936 overflow) and negate (which can't either). Special-case a result
1937 of zero while we're here. */
1938 if (tree_int_cst_equal (arg0, arg1))
1939 return build_int_cst (ctype, 0);
1940 else if (tree_int_cst_lt (arg1, arg0))
1941 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942 else
1943 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944 fold_convert (ctype, size_binop (MINUS_EXPR,
1945 arg1, arg0)));
1946}
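
/* Example: with sizetype constants 4 and 12, size_diffop returns the
   ssizetype constant -8 via the negated subtraction in the final arm;
   for non-constant operands the subtraction is simply emitted in the
   signed type, where wrap-around yields the expected difference.  */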
1947
1948/* A subroutine of fold_convert_const handling conversions of an
1949 INTEGER_CST to another integer type. */
1950
1951static tree
1952fold_convert_const_int_from_int (tree type, tree arg1)
1953{
1954 tree t;
1955
1956 /* Given an integer constant, make new constant with new type,
1957 appropriately sign-extended or truncated. */
1958 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959 TREE_INT_CST_HIGH (arg1));
1960
1961 t = force_fit_type (t,
1962 /* Don't set the overflow when
1963			 converting a pointer.  */
1964 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965 (TREE_INT_CST_HIGH (arg1) < 0
1966 && (TYPE_UNSIGNED (type)
1967 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968 | TREE_OVERFLOW (arg1),
1969 TREE_CONSTANT_OVERFLOW (arg1));
1970
1971 return t;
1972}
1973
1974/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975 to an integer type. */
1976
1977static tree
1978fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979{
1980 int overflow = 0;
1981 tree t;
1982
1983 /* The following code implements the floating point to integer
1984 conversion rules required by the Java Language Specification,
1985 that IEEE NaNs are mapped to zero and values that overflow
1986 the target precision saturate, i.e. values greater than
1987 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988 are mapped to INT_MIN. These semantics are allowed by the
1989 C and C++ standards that simply state that the behavior of
1990 FP-to-integer conversion is unspecified upon overflow. */
1991
1992 HOST_WIDE_INT high, low;
1993 REAL_VALUE_TYPE r;
1994 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995
1996 switch (code)
1997 {
1998 case FIX_TRUNC_EXPR:
1999 real_trunc (&r, VOIDmode, &x);
2000 break;
2001
2002 case FIX_CEIL_EXPR:
2003 real_ceil (&r, VOIDmode, &x);
2004 break;
2005
2006 case FIX_FLOOR_EXPR:
2007 real_floor (&r, VOIDmode, &x);
2008 break;
2009
2010 case FIX_ROUND_EXPR:
2011 real_round (&r, VOIDmode, &x);
2012 break;
2013
2014 default:
2015 gcc_unreachable ();
2016 }
2017
2018 /* If R is NaN, return zero and show we have an overflow. */
2019 if (REAL_VALUE_ISNAN (r))
2020 {
2021 overflow = 1;
2022 high = 0;
2023 low = 0;
2024 }
2025
2026 /* See if R is less than the lower bound or greater than the
2027 upper bound. */
2028
2029 if (! overflow)
2030 {
2031 tree lt = TYPE_MIN_VALUE (type);
2032 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033 if (REAL_VALUES_LESS (r, l))
2034 {
2035 overflow = 1;
2036 high = TREE_INT_CST_HIGH (lt);
2037 low = TREE_INT_CST_LOW (lt);
2038 }
2039 }
2040
2041 if (! overflow)
2042 {
2043 tree ut = TYPE_MAX_VALUE (type);
2044 if (ut)
2045 {
2046 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 if (REAL_VALUES_LESS (u, r))
2048 {
2049 overflow = 1;
2050 high = TREE_INT_CST_HIGH (ut);
2051 low = TREE_INT_CST_LOW (ut);
2052 }
2053 }
2054 }
2055
2056 if (! overflow)
2057 REAL_VALUE_TO_INT (&low, &high, r);
2058
2059 t = build_int_cst_wide (type, low, high);
2060
2061 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062 TREE_CONSTANT_OVERFLOW (arg1));
2063 return t;
2064}
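
/* Examples of the saturating semantics above, for a 32-bit int target
   type: (int) 3.75 under FIX_TRUNC_EXPR folds to 3; (int) 1e30 folds
   to INT_MAX with TREE_OVERFLOW set; and (int) of a NaN folds to 0,
   likewise flagged as an overflow.  */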
2065
2066/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to another floating point type. */
2068
2069static tree
2070fold_convert_const_real_from_real (tree type, tree arg1)
2071{
2072 REAL_VALUE_TYPE value;
2073 tree t;
2074
2075 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076 t = build_real (type, value);
2077
2078 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079 TREE_CONSTANT_OVERFLOW (t)
2080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081 return t;
2082}
2083
2084/* Attempt to fold type conversion operation CODE of expression ARG1 to
2085 type TYPE. If no simplification can be done return NULL_TREE. */
2086
2087static tree
2088fold_convert_const (enum tree_code code, tree type, tree arg1)
2089{
2090 if (TREE_TYPE (arg1) == type)
2091 return arg1;
2092
2093 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094 {
2095 if (TREE_CODE (arg1) == INTEGER_CST)
2096 return fold_convert_const_int_from_int (type, arg1);
2097 else if (TREE_CODE (arg1) == REAL_CST)
2098 return fold_convert_const_int_from_real (code, type, arg1);
2099 }
2100 else if (TREE_CODE (type) == REAL_TYPE)
2101 {
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return build_real_from_int_cst (type, arg1);
2104 if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_real_from_real (type, arg1);
2106 }
2107 return NULL_TREE;
2108}
2109
2110/* Construct a vector of zero elements of vector type TYPE. */
2111
2112static tree
2113build_zero_vector (tree type)
2114{
2115 tree elem, list;
2116 int i, units;
2117
2118 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119 units = TYPE_VECTOR_SUBPARTS (type);
2120
2121 list = NULL_TREE;
2122 for (i = 0; i < units; i++)
2123 list = tree_cons (NULL_TREE, elem, list);
2124 return build_vector (type, list);
2125}
2126
2127/* Convert expression ARG to type TYPE. Used by the middle-end for
2128 simple conversions in preference to calling the front-end's convert. */
2129
2130tree
2131fold_convert (tree type, tree arg)
2132{
2133 tree orig = TREE_TYPE (arg);
2134 tree tem;
2135
2136 if (type == orig)
2137 return arg;
2138
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return error_mark_node;
2143
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146 TYPE_MAIN_VARIANT (orig)))
2147 return fold_build1 (NOP_EXPR, type, arg);
2148
2149 switch (TREE_CODE (type))
2150 {
2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152 case POINTER_TYPE: case REFERENCE_TYPE:
2153 case OFFSET_TYPE:
2154 if (TREE_CODE (arg) == INTEGER_CST)
2155 {
2156 tem = fold_convert_const (NOP_EXPR, type, arg);
2157 if (tem != NULL_TREE)
2158 return tem;
2159 }
2160 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161 || TREE_CODE (orig) == OFFSET_TYPE)
2162 return fold_build1 (NOP_EXPR, type, arg);
2163 if (TREE_CODE (orig) == COMPLEX_TYPE)
2164 {
2165 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166 return fold_convert (type, tem);
2167 }
2168 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2169 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170 return fold_build1 (NOP_EXPR, type, arg);
2171
2172 case REAL_TYPE:
2173 if (TREE_CODE (arg) == INTEGER_CST)
2174 {
2175 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176 if (tem != NULL_TREE)
2177 return tem;
2178 }
2179 else if (TREE_CODE (arg) == REAL_CST)
2180 {
2181 tem = fold_convert_const (NOP_EXPR, type, arg);
2182 if (tem != NULL_TREE)
2183 return tem;
2184 }
2185
2186 switch (TREE_CODE (orig))
2187 {
2188 case INTEGER_TYPE:
2189 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190 case POINTER_TYPE: case REFERENCE_TYPE:
2191 return fold_build1 (FLOAT_EXPR, type, arg);
2192
2193 case REAL_TYPE:
2194 return fold_build1 (NOP_EXPR, type, arg);
2195
2196 case COMPLEX_TYPE:
2197 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198 return fold_convert (type, tem);
2199
2200 default:
2201 gcc_unreachable ();
2202 }
2203
2204 case COMPLEX_TYPE:
2205 switch (TREE_CODE (orig))
2206 {
2207 case INTEGER_TYPE:
2208 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209 case POINTER_TYPE: case REFERENCE_TYPE:
2210 case REAL_TYPE:
2211 return build2 (COMPLEX_EXPR, type,
2212 fold_convert (TREE_TYPE (type), arg),
2213 fold_convert (TREE_TYPE (type), integer_zero_node));
2214 case COMPLEX_TYPE:
2215 {
2216 tree rpart, ipart;
2217
2218 if (TREE_CODE (arg) == COMPLEX_EXPR)
2219 {
2220 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2223 }
2224
2225 arg = save_expr (arg);
2226 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228 rpart = fold_convert (TREE_TYPE (type), rpart);
2229 ipart = fold_convert (TREE_TYPE (type), ipart);
2230 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2231 }
2232
2233 default:
2234 gcc_unreachable ();
2235 }
2236
2237 case VECTOR_TYPE:
2238 if (integer_zerop (arg))
2239 return build_zero_vector (type);
2240 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242 || TREE_CODE (orig) == VECTOR_TYPE);
2243 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244
2245 case VOID_TYPE:
2246 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247
2248 default:
2249 gcc_unreachable ();
2250 }
2251}
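
/* Usage sketch: fold_convert (long_integer_type_node, arg) with ARG
   the int constant 42 folds directly to a long INTEGER_CST via
   fold_convert_const; converting a complex value to a scalar type
   keeps only its real part, per the REALPART_EXPR cases above.  */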
2252
2253/* Return false if expr can be assumed not to be an lvalue, true
2254 otherwise. */
2255
2256static bool
2257maybe_lvalue_p (tree x)
2258{
2259 /* We only need to wrap lvalue tree codes. */
2260 switch (TREE_CODE (x))
2261 {
2262 case VAR_DECL:
2263 case PARM_DECL:
2264 case RESULT_DECL:
2265 case LABEL_DECL:
2266 case FUNCTION_DECL:
2267 case SSA_NAME:
2268
2269 case COMPONENT_REF:
2270 case INDIRECT_REF:
2271 case ALIGN_INDIRECT_REF:
2272 case MISALIGNED_INDIRECT_REF:
2273 case ARRAY_REF:
2274 case ARRAY_RANGE_REF:
2275 case BIT_FIELD_REF:
2276 case OBJ_TYPE_REF:
2277
2278 case REALPART_EXPR:
2279 case IMAGPART_EXPR:
2280 case PREINCREMENT_EXPR:
2281 case PREDECREMENT_EXPR:
2282 case SAVE_EXPR:
2283 case TRY_CATCH_EXPR:
2284 case WITH_CLEANUP_EXPR:
2285 case COMPOUND_EXPR:
2286 case MODIFY_EXPR:
2287 case TARGET_EXPR:
2288 case COND_EXPR:
2289 case BIND_EXPR:
2290 case MIN_EXPR:
2291 case MAX_EXPR:
2292 break;
2293
2294 default:
2295 /* Assume the worst for front-end tree codes. */
2296 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2297 break;
2298 return false;
2299 }
2300
2301 return true;
2302}
2303
2304/* Return an expr equal to X but certainly not valid as an lvalue. */
2305
2306tree
2307non_lvalue (tree x)
2308{
2309 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310 us. */
2311 if (in_gimple_form)
2312 return x;
2313
2314 if (! maybe_lvalue_p (x))
2315 return x;
2316 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2317}
2318
2319/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320 Zero means allow extended lvalues. */
2321
2322int pedantic_lvalues;
2323
2324/* When pedantic, return an expr equal to X but certainly not valid as a
2325 pedantic lvalue. Otherwise, return X. */
2326
2327static tree
2328pedantic_non_lvalue (tree x)
2329{
2330 if (pedantic_lvalues)
2331 return non_lvalue (x);
2332 else
2333 return x;
2334}
2335
2336/* Given a tree comparison code, return the code that is the logical inverse
2337   of the given code.  It is not safe to do this for floating-point
2338   comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2339   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2340
2341enum tree_code
2342invert_tree_comparison (enum tree_code code, bool honor_nans)
2343{
2344 if (honor_nans && flag_trapping_math)
2345 return ERROR_MARK;
2346
2347 switch (code)
2348 {
2349 case EQ_EXPR:
2350 return NE_EXPR;
2351 case NE_EXPR:
2352 return EQ_EXPR;
2353 case GT_EXPR:
2354 return honor_nans ? UNLE_EXPR : LE_EXPR;
2355 case GE_EXPR:
2356 return honor_nans ? UNLT_EXPR : LT_EXPR;
2357 case LT_EXPR:
2358 return honor_nans ? UNGE_EXPR : GE_EXPR;
2359 case LE_EXPR:
2360 return honor_nans ? UNGT_EXPR : GT_EXPR;
2361 case LTGT_EXPR:
2362 return UNEQ_EXPR;
2363 case UNEQ_EXPR:
2364 return LTGT_EXPR;
2365 case UNGT_EXPR:
2366 return LE_EXPR;
2367 case UNGE_EXPR:
2368 return LT_EXPR;
2369 case UNLT_EXPR:
2370 return GE_EXPR;
2371 case UNLE_EXPR:
2372 return GT_EXPR;
2373 case ORDERED_EXPR:
2374 return UNORDERED_EXPR;
2375 case UNORDERED_EXPR:
2376 return ORDERED_EXPR;
2377 default:
2378 gcc_unreachable ();
2379 }
2380}
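
/* Example: the inverse of LT_EXPR is GE_EXPR when NaNs need not be
   honored, but UNGE_EXPR when they must be, since !(x < y) also holds
   for unordered operands.  With both HONOR_NANS and flag_trapping_math
   the function returns ERROR_MARK, because swapping an ordered
   comparison for an unordered one would change which operands raise
   an invalid-operand exception.  */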
2381
2382/* Similar, but return the comparison that results if the operands are
2383 swapped. This is safe for floating-point. */
2384
2385enum tree_code
2386swap_tree_comparison (enum tree_code code)
2387{
2388 switch (code)
2389 {
2390 case EQ_EXPR:
2391 case NE_EXPR:
2392 case ORDERED_EXPR:
2393 case UNORDERED_EXPR:
2394 case LTGT_EXPR:
2395 case UNEQ_EXPR:
2396 return code;
2397 case GT_EXPR:
2398 return LT_EXPR;
2399 case GE_EXPR:
2400 return LE_EXPR;
2401 case LT_EXPR:
2402 return GT_EXPR;
2403 case LE_EXPR:
2404 return GE_EXPR;
2405 case UNGT_EXPR:
2406 return UNLT_EXPR;
2407 case UNGE_EXPR:
2408 return UNLE_EXPR;
2409 case UNLT_EXPR:
2410 return UNGT_EXPR;
2411 case UNLE_EXPR:
2412 return UNGE_EXPR;
2413 default:
2414 gcc_unreachable ();
2415 }
2416}
2417
2418
2419/* Convert a comparison tree code from an enum tree_code representation
2420 into a compcode bit-based encoding. This function is the inverse of
2421 compcode_to_comparison. */
2422
2423static enum comparison_code
2424comparison_to_compcode (enum tree_code code)
2425{
2426 switch (code)
2427 {
2428 case LT_EXPR:
2429 return COMPCODE_LT;
2430 case EQ_EXPR:
2431 return COMPCODE_EQ;
2432 case LE_EXPR:
2433 return COMPCODE_LE;
2434 case GT_EXPR:
2435 return COMPCODE_GT;
2436 case NE_EXPR:
2437 return COMPCODE_NE;
2438 case GE_EXPR:
2439 return COMPCODE_GE;
2440 case ORDERED_EXPR:
2441 return COMPCODE_ORD;
2442 case UNORDERED_EXPR:
2443 return COMPCODE_UNORD;
2444 case UNLT_EXPR:
2445 return COMPCODE_UNLT;
2446 case UNEQ_EXPR:
2447 return COMPCODE_UNEQ;
2448 case UNLE_EXPR:
2449 return COMPCODE_UNLE;
2450 case UNGT_EXPR:
2451 return COMPCODE_UNGT;
2452 case LTGT_EXPR:
2453 return COMPCODE_LTGT;
2454 case UNGE_EXPR:
2455 return COMPCODE_UNGE;
2456 default:
2457 gcc_unreachable ();
2458 }
2459}
2460
2461/* Convert a compcode bit-based encoding of a comparison operator back
2462 to GCC's enum tree_code representation. This function is the
2463 inverse of comparison_to_compcode. */
2464
2465static enum tree_code
2466compcode_to_comparison (enum comparison_code code)
2467{
2468 switch (code)
2469 {
2470 case COMPCODE_LT:
2471 return LT_EXPR;
2472 case COMPCODE_EQ:
2473 return EQ_EXPR;
2474 case COMPCODE_LE:
2475 return LE_EXPR;
2476 case COMPCODE_GT:
2477 return GT_EXPR;
2478 case COMPCODE_NE:
2479 return NE_EXPR;
2480 case COMPCODE_GE:
2481 return GE_EXPR;
2482 case COMPCODE_ORD:
2483 return ORDERED_EXPR;
2484 case COMPCODE_UNORD:
2485 return UNORDERED_EXPR;
2486 case COMPCODE_UNLT:
2487 return UNLT_EXPR;
2488 case COMPCODE_UNEQ:
2489 return UNEQ_EXPR;
2490 case COMPCODE_UNLE:
2491 return UNLE_EXPR;
2492 case COMPCODE_UNGT:
2493 return UNGT_EXPR;
2494 case COMPCODE_LTGT:
2495 return LTGT_EXPR;
2496 case COMPCODE_UNGE:
2497 return UNGE_EXPR;
2498 default:
2499 gcc_unreachable ();
2500 }
2501}
2502
2503/* Return a tree for the comparison that results from combining, with
2504   AND or OR (depending on CODE), the two comparisons LCODE and RCODE
2505   on the identical operands LL_ARG and LR_ARG.  Take into account
2506 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507 if this makes the transformation invalid. */
2508
2509tree
2510combine_comparisons (enum tree_code code, enum tree_code lcode,
2511 enum tree_code rcode, tree truth_type,
2512 tree ll_arg, tree lr_arg)
2513{
2514 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517 enum comparison_code compcode;
2518
2519 switch (code)
2520 {
2521 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522 compcode = lcompcode & rcompcode;
2523 break;
2524
2525 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526 compcode = lcompcode | rcompcode;
2527 break;
2528
2529 default:
2530 return NULL_TREE;
2531 }
2532
2533 if (!honor_nans)
2534 {
2535 /* Eliminate unordered comparisons, as well as LTGT and ORD
2536 which are not used unless the mode has NaNs. */
2537 compcode &= ~COMPCODE_UNORD;
2538 if (compcode == COMPCODE_LTGT)
2539 compcode = COMPCODE_NE;
2540 else if (compcode == COMPCODE_ORD)
2541 compcode = COMPCODE_TRUE;
2542 }
2543 else if (flag_trapping_math)
2544 {
2545 /* Check that the original operation and the optimized ones will trap
2546 under the same condition. */
2547 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548 && (lcompcode != COMPCODE_EQ)
2549 && (lcompcode != COMPCODE_ORD);
2550 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551 && (rcompcode != COMPCODE_EQ)
2552 && (rcompcode != COMPCODE_ORD);
2553 bool trap = (compcode & COMPCODE_UNORD) == 0
2554 && (compcode != COMPCODE_EQ)
2555 && (compcode != COMPCODE_ORD);
2556
2557 /* In a short-circuited boolean expression the LHS might be
2558 such that the RHS, if evaluated, will never trap. For
2559 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560 if neither x nor y is NaN. (This is a mixed blessing: for
2561 example, the expression above will never trap, hence
2562 optimizing it to x < y would be invalid). */
2563 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2565 rtrap = false;
2566
2567 /* If the comparison was short-circuited, and only the RHS
2568 trapped, we may now generate a spurious trap. */
2569 if (rtrap && !ltrap
2570 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2571 return NULL_TREE;
2572
2573 /* If we changed the conditions that cause a trap, we lose. */
2574 if ((ltrap || rtrap) != trap)
2575 return NULL_TREE;
2576 }
2577
2578 if (compcode == COMPCODE_TRUE)
2579 return constant_boolean_node (true, truth_type);
2580 else if (compcode == COMPCODE_FALSE)
2581 return constant_boolean_node (false, truth_type);
2582 else
2583 return fold_build2 (compcode_to_comparison (compcode),
2584 truth_type, ll_arg, lr_arg);
2585}
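
/* Worked example: combining (a < b) && (a == b) ANDs the two compcodes
   into COMPCODE_FALSE, so constant_boolean_node (false, ...) is
   returned; combining (a < b) || (a == b) ORs them into COMPCODE_LE
   and the result is rebuilt as a <= b.  */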
2586
2587/* Return nonzero if CODE is a tree code that represents a truth value. */
2588
2589static int
2590truth_value_p (enum tree_code code)
2591{
2592 return (TREE_CODE_CLASS (code) == tcc_comparison
2593 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2596}
2597
2598/* Return nonzero if two operands (typically of the same tree node)
2599 are necessarily equal. If either argument has side-effects this
2600 function returns zero. FLAGS modifies behavior as follows:
2601
2602 If OEP_ONLY_CONST is set, only return nonzero for constants.
2603 This function tests whether the operands are indistinguishable;
2604 it does not test whether they are equal using C's == operation.
2605 The distinction is important for IEEE floating point, because
2606 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608
2609 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610 even though it may hold multiple values during a function.
2611 This is because a GCC tree node guarantees that nothing else is
2612 executed between the evaluation of its "operands" (which may often
2613 be evaluated in arbitrary order). Hence if the operands themselves
2614 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2616 unset means assuming isochronic (or instantaneous) tree equivalence.
2617 Unless comparing arbitrary expression trees, such as from different
2618 statements, this flag can usually be left unset.
2619
2620 If OEP_PURE_SAME is set, then pure functions with identical arguments
2621 are considered the same. It is used when the caller has other ways
2622 to ensure that global memory is unchanged in between. */
2623
2624int
2625operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626{
2627 /* If either is ERROR_MARK, they aren't equal. */
2628 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2629 return 0;
2630
2631  /* If the two types don't have the same signedness, then we can't consider
2632 them equal. We must check this before the STRIP_NOPS calls
2633 because they may change the signedness of the arguments. */
2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636
2637  /* If the two types don't have the same precision, then it is not safe
2638 to strip NOPs. */
2639 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2640 return 0;
2641
2642 STRIP_NOPS (arg0);
2643 STRIP_NOPS (arg1);
2644
2645 /* In case both args are comparisons but with different comparison
2646 code, try to swap the comparison operands of one arg to produce
2647 a match and compare that variant. */
2648 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649 && COMPARISON_CLASS_P (arg0)
2650 && COMPARISON_CLASS_P (arg1))
2651 {
2652 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653
2654 if (TREE_CODE (arg0) == swap_code)
2655 return operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags);
2659 }
2660
2661 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662 /* This is needed for conversions and for COMPONENT_REF.
2663 Might as well play it safe and always test this. */
2664 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2667 return 0;
2668
2669 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670 We don't care about side effects in that case because the SAVE_EXPR
2671 takes care of that for us. In all other cases, two expressions are
2672 equal if they have no side effects. If we have two identical
2673 expressions with side effects that should be treated the same due
2674 to the only side effects being identical SAVE_EXPR's, that will
2675 be detected in the recursive calls below. */
2676 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677 && (TREE_CODE (arg0) == SAVE_EXPR
2678 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2679 return 1;
2680
2681 /* Next handle constant cases, those for which we can return 1 even
2682 if ONLY_CONST is set. */
2683 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684 switch (TREE_CODE (arg0))
2685 {
2686 case INTEGER_CST:
2687 return (! TREE_CONSTANT_OVERFLOW (arg0)
2688 && ! TREE_CONSTANT_OVERFLOW (arg1)
2689 && tree_int_cst_equal (arg0, arg1));
2690
2691 case REAL_CST:
2692 return (! TREE_CONSTANT_OVERFLOW (arg0)
2693 && ! TREE_CONSTANT_OVERFLOW (arg1)
2694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695 TREE_REAL_CST (arg1)));
2696
2697 case VECTOR_CST:
2698 {
2699 tree v1, v2;
2700
2701 if (TREE_CONSTANT_OVERFLOW (arg0)
2702 || TREE_CONSTANT_OVERFLOW (arg1))
2703 return 0;
2704
2705 v1 = TREE_VECTOR_CST_ELTS (arg0);
2706 v2 = TREE_VECTOR_CST_ELTS (arg1);
2707 while (v1 && v2)
2708 {
2709 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2710 flags))
2711 return 0;
2712 v1 = TREE_CHAIN (v1);
2713 v2 = TREE_CHAIN (v2);
2714 }
2715
2716 return v1 == v2;
2717 }
2718
2719 case COMPLEX_CST:
2720 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2721 flags)
2722 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2723 flags));
2724
2725 case STRING_CST:
2726 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727 && ! memcmp (TREE_STRING_POINTER (arg0),
2728 TREE_STRING_POINTER (arg1),
2729 TREE_STRING_LENGTH (arg0)));
2730
2731 case ADDR_EXPR:
2732 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2733 0);
2734 default:
2735 break;
2736 }
2737
2738 if (flags & OEP_ONLY_CONST)
2739 return 0;
2740
2741/* Define macros to test an operand from arg0 and arg1 for equality and a
2742 variant that allows null and views null as being different from any
2743   non-null value.  In the latter case, if either is null, then both
2744 must be; otherwise, do the normal comparison. */
2745#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2746 TREE_OPERAND (arg1, N), flags)
2747
2748#define OP_SAME_WITH_NULL(N) \
2749 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2750 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751
2752 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753 {
2754 case tcc_unary:
2755 /* Two conversions are equal only if signedness and modes match. */
2756 switch (TREE_CODE (arg0))
2757 {
2758 case NOP_EXPR:
2759 case CONVERT_EXPR:
2760 case FIX_CEIL_EXPR:
2761 case FIX_TRUNC_EXPR:
2762 case FIX_FLOOR_EXPR:
2763 case FIX_ROUND_EXPR:
2764 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2766 return 0;
2767 break;
2768 default:
2769 break;
2770 }
2771
2772 return OP_SAME (0);
2773
2774
2775 case tcc_comparison:
2776 case tcc_binary:
2777 if (OP_SAME (0) && OP_SAME (1))
2778 return 1;
2779
2780 /* For commutative ops, allow the other order. */
2781 return (commutative_tree_code (TREE_CODE (arg0))
2782 && operand_equal_p (TREE_OPERAND (arg0, 0),
2783 TREE_OPERAND (arg1, 1), flags)
2784 && operand_equal_p (TREE_OPERAND (arg0, 1),
2785 TREE_OPERAND (arg1, 0), flags));
2786
2787 case tcc_reference:
2788 /* If either of the pointer (or reference) expressions we are
2789	 dereferencing contains a side effect, these cannot be equal.  */
2790 if (TREE_SIDE_EFFECTS (arg0)
2791 || TREE_SIDE_EFFECTS (arg1))
2792 return 0;
2793
2794 switch (TREE_CODE (arg0))
2795 {
2796 case INDIRECT_REF:
2797 case ALIGN_INDIRECT_REF:
2798 case MISALIGNED_INDIRECT_REF:
2799 case REALPART_EXPR:
2800 case IMAGPART_EXPR:
2801 return OP_SAME (0);
2802
2803 case ARRAY_REF:
2804 case ARRAY_RANGE_REF:
2805 /* Operands 2 and 3 may be null. */
2806 return (OP_SAME (0)
2807 && OP_SAME (1)
2808 && OP_SAME_WITH_NULL (2)
2809 && OP_SAME_WITH_NULL (3));
2810
2811 case COMPONENT_REF:
2812 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2813 may be NULL when we're called to compare MEM_EXPRs. */
2814 return OP_SAME_WITH_NULL (0)
2815 && OP_SAME (1)
2816 && OP_SAME_WITH_NULL (2);
2817
2818 case BIT_FIELD_REF:
2819 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820
2821 default:
2822 return 0;
2823 }
2824
2825 case tcc_expression:
2826 switch (TREE_CODE (arg0))
2827 {
2828 case ADDR_EXPR:
2829 case TRUTH_NOT_EXPR:
2830 return OP_SAME (0);
2831
2832 case TRUTH_ANDIF_EXPR:
2833 case TRUTH_ORIF_EXPR:
2834 return OP_SAME (0) && OP_SAME (1);
2835
2836 case TRUTH_AND_EXPR:
2837 case TRUTH_OR_EXPR:
2838 case TRUTH_XOR_EXPR:
2839 if (OP_SAME (0) && OP_SAME (1))
2840 return 1;
2841
2842 /* Otherwise take into account this is a commutative operation. */
2843 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2844 TREE_OPERAND (arg1, 1), flags)
2845 && operand_equal_p (TREE_OPERAND (arg0, 1),
2846 TREE_OPERAND (arg1, 0), flags));
2847
2848 case CALL_EXPR:
2849 /* If the CALL_EXPRs call different functions, then they
2850	     clearly cannot be equal.  */
2851 if (!OP_SAME (0))
2852 return 0;
2853
2854 {
2855 unsigned int cef = call_expr_flags (arg0);
2856 if (flags & OEP_PURE_SAME)
2857 cef &= ECF_CONST | ECF_PURE;
2858 else
2859 cef &= ECF_CONST;
2860 if (!cef)
2861 return 0;
2862 }
2863
2864 /* Now see if all the arguments are the same. operand_equal_p
2865 does not handle TREE_LIST, so we walk the operands here
2866 feeding them to operand_equal_p. */
2867 arg0 = TREE_OPERAND (arg0, 1);
2868 arg1 = TREE_OPERAND (arg1, 1);
2869 while (arg0 && arg1)
2870 {
2871 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2872 flags))
2873 return 0;
2874
2875 arg0 = TREE_CHAIN (arg0);
2876 arg1 = TREE_CHAIN (arg1);
2877 }
2878
2879 /* If we get here and both argument lists are exhausted
2880 then the CALL_EXPRs are equal. */
2881 return ! (arg0 || arg1);
2882
2883 default:
2884 return 0;
2885 }
2886
2887 case tcc_declaration:
2888 /* Consider __builtin_sqrt equal to sqrt. */
2889 return (TREE_CODE (arg0) == FUNCTION_DECL
2890 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2891 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2892 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893
2894 default:
2895 return 0;
2896 }
2897
2898#undef OP_SAME
2899#undef OP_SAME_WITH_NULL
2900}
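
/* Usage sketch: operand_equal_p (x, y, 0) returns 1 for two *p
   dereferences of the same side-effect-free pointer, and 0 for the
   REAL_CSTs 0.0 and -0.0, which are distinguishable even though they
   compare equal with ==.  Passing OEP_ONLY_CONST restricts matches to
   constants.  */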
2901
2902/* Similar to operand_equal_p, but see if ARG0 might have been made by
2903 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904
2905 When in doubt, return 0. */
2906
2907static int
2908operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909{
2910 int unsignedp1, unsignedpo;
2911 tree primarg0, primarg1, primother;
2912 unsigned int correct_width;
2913
2914 if (operand_equal_p (arg0, arg1, 0))
2915 return 1;
2916
2917 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2918 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2919 return 0;
2920
2921 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2922 and see if the inner values are the same. This removes any
2923 signedness comparison, which doesn't matter here. */
2924 primarg0 = arg0, primarg1 = arg1;
2925 STRIP_NOPS (primarg0);
2926 STRIP_NOPS (primarg1);
2927 if (operand_equal_p (primarg0, primarg1, 0))
2928 return 1;
2929
2930 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2931 actual comparison operand, ARG0.
2932
2933 First throw away any conversions to wider types
2934 already present in the operands. */
2935
2936 primarg1 = get_narrower (arg1, &unsignedp1);
2937 primother = get_narrower (other, &unsignedpo);
2938
2939 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2940 if (unsignedp1 == unsignedpo
2941 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2942 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943 {
2944 tree type = TREE_TYPE (arg0);
2945
2946 /* Make sure shorter operand is extended the right way
2947 to match the longer operand. */
2948 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2949 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950
2951 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2952 return 1;
2953 }
2954
2955 return 0;
2956}
2957
2958/* See if ARG is an expression that is either a comparison or is performing
2959 arithmetic on comparisons. The comparisons must only be comparing
2960 two different values, which will be stored in *CVAL1 and *CVAL2; if
2961 they are nonzero it means that some operands have already been found.
2962 No variables may be used anywhere else in the expression except in the
2963 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2964 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965
2966 If this is true, return 1. Otherwise, return zero. */
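
/* For example, "a < b && b != a" stores a in *CVAL1 and b in *CVAL2
   and returns 1, whereas "a < b || a == c" returns 0 because it
   involves three distinct values.  */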
2967
2968static int
2969twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970{
2971 enum tree_code code = TREE_CODE (arg);
2972 enum tree_code_class class = TREE_CODE_CLASS (code);
2973
2974 /* We can handle some of the tcc_expression cases here. */
2975 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2976 class = tcc_unary;
2977 else if (class == tcc_expression
2978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2979 || code == COMPOUND_EXPR))
2980 class = tcc_binary;
2981
2982 else if (class == tcc_expression && code == SAVE_EXPR
2983 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984 {
2985 /* If we've already found a CVAL1 or CVAL2, this expression is
2986	   too complex to handle.  */
2987 if (*cval1 || *cval2)
2988 return 0;
2989
2990 class = tcc_unary;
2991 *save_p = 1;
2992 }
2993
2994 switch (class)
2995 {
2996 case tcc_unary:
2997 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998
2999 case tcc_binary:
3000 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3001 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3002 cval1, cval2, save_p));
3003
3004 case tcc_constant:
3005 return 1;
3006
3007 case tcc_expression:
3008 if (code == COND_EXPR)
3009 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3010 cval1, cval2, save_p)
3011 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3012 cval1, cval2, save_p)
3013 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3014 cval1, cval2, save_p));
3015 return 0;
3016
3017 case tcc_comparison:
3018 /* First see if we can handle the first operand, then the second. For
3019 the second operand, we know *CVAL1 can't be zero. It must be that
3020 one side of the comparison is each of the values; test for the
3021 case where this isn't true by failing if the two operands
3022 are the same. */
3023
3024 if (operand_equal_p (TREE_OPERAND (arg, 0),
3025 TREE_OPERAND (arg, 1), 0))
3026 return 0;
3027
3028 if (*cval1 == 0)
3029 *cval1 = TREE_OPERAND (arg, 0);
3030 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3031 ;
3032 else if (*cval2 == 0)
3033 *cval2 = TREE_OPERAND (arg, 0);
3034 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3035 ;
3036 else
3037 return 0;
3038
3039 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3040 ;
3041 else if (*cval2 == 0)
3042 *cval2 = TREE_OPERAND (arg, 1);
3043 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3044 ;
3045 else
3046 return 0;
3047
3048 return 1;
3049
3050 default:
3051 return 0;
3052 }
3053}
3054
3055/* ARG is a tree that is known to contain just arithmetic operations and
3056 comparisons. Evaluate the operations in the tree substituting NEW0 for
3057 any occurrence of OLD0 as an operand of a comparison and likewise for
3058 NEW1 and OLD1. */
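
/* For example, substituting OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1
   turns the tree for "a < b" into "0 < 1", which fold_build2 then
   reduces to a constant.  */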
3059
3060static tree
3061eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062{
3063 tree type = TREE_TYPE (arg);
3064 enum tree_code code = TREE_CODE (arg);
3065 enum tree_code_class class = TREE_CODE_CLASS (code);
3066
3067 /* We can handle some of the tcc_expression cases here. */
3068 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3069 class = tcc_unary;
3070 else if (class == tcc_expression
3071 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3072 class = tcc_binary;
3073
3074 switch (class)
3075 {
3076 case tcc_unary:
3077 return fold_build1 (code, type,
3078 eval_subst (TREE_OPERAND (arg, 0),
3079 old0, new0, old1, new1));
3080
3081 case tcc_binary:
3082 return fold_build2 (code, type,
3083 eval_subst (TREE_OPERAND (arg, 0),
3084 old0, new0, old1, new1),
3085 eval_subst (TREE_OPERAND (arg, 1),
3086 old0, new0, old1, new1));
3087
3088 case tcc_expression:
3089 switch (code)
3090 {
3091 case SAVE_EXPR:
3092 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093
3094 case COMPOUND_EXPR:
3095 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096
3097 case COND_EXPR:
3098 return fold_build3 (code, type,
3099 eval_subst (TREE_OPERAND (arg, 0),
3100 old0, new0, old1, new1),
3101 eval_subst (TREE_OPERAND (arg, 1),
3102 old0, new0, old1, new1),
3103 eval_subst (TREE_OPERAND (arg, 2),
3104 old0, new0, old1, new1));
3105 default:
3106 break;
3107 }
3108 /* Fall through - ??? */
3109
3110 case tcc_comparison:
3111 {
3112 tree arg0 = TREE_OPERAND (arg, 0);
3113 tree arg1 = TREE_OPERAND (arg, 1);
3114
3115 /* We need to check both for exact equality and tree equality. The
3116 former will be true if the operand has a side-effect. In that
3117 case, we know the operand occurred exactly once. */
3118
3119 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3120 arg0 = new0;
3121 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3122 arg0 = new1;
3123
3124 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3125 arg1 = new0;
3126 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3127 arg1 = new1;
3128
3129 return fold_build2 (code, type, arg0, arg1);
3130 }
3131
3132 default:
3133 return arg;
3134 }
3135}
3136
3137/* Return a tree for the case when the result of an expression is RESULT
3138 converted to TYPE and OMITTED was previously an operand of the expression
3139 but is now not needed (e.g., we folded OMITTED * 0).
3140
3141 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3142 the conversion of RESULT to TYPE. */
3143
3144tree
3145omit_one_operand (tree type, tree result, tree omitted)
3146{
3147 tree t = fold_convert (type, result);
3148
3149 if (TREE_SIDE_EFFECTS (omitted))
3150 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151
3152 return non_lvalue (t);
3153}
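
/* For example, when "f () * 0" is folded, RESULT is 0 and OMITTED is
   the call "f ()"; the call has side effects, so the result is the
   COMPOUND_EXPR "(f (), 0)", which still evaluates the call.  */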
3154
3155/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3156
3157static tree
3158pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159{
3160 tree t = fold_convert (type, result);
3161
3162 if (TREE_SIDE_EFFECTS (omitted))
3163 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164
3165 return pedantic_non_lvalue (t);
3166}
3167
3168/* Return a tree for the case when the result of an expression is RESULT
3169 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3170 of the expression but are now not needed.
3171
3172 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3173 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3174 evaluated before OMITTED2. Otherwise, if neither has side effects,
3175 just do the conversion of RESULT to TYPE. */
3176
3177tree
3178omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179{
3180 tree t = fold_convert (type, result);
3181
3182 if (TREE_SIDE_EFFECTS (omitted2))
3183 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3184 if (TREE_SIDE_EFFECTS (omitted1))
3185 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186
3187 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3188}
3189
3190
3191/* Return a simplified tree node for the truth-negation of ARG. This
3192 never alters ARG itself. We assume that ARG is an operation that
3193 returns a truth value (0 or 1).
3194
3195 FIXME: one would think we would fold the result, but it causes
3196 problems with the dominator optimizer. */
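
/* For example, an integer "a < b" inverts to "a >= b", while a
   floating-point "a < b" under -ftrapping-math is returned as
   NULL_TREE and left for the caller to wrap as "!(a < b)", because
   no inverse comparison has the same trapping behavior.  */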
3197
3198tree
3199fold_truth_not_expr (tree arg)
3200{
3201 tree type = TREE_TYPE (arg);
3202 enum tree_code code = TREE_CODE (arg);
3203
3204 /* If this is a comparison, we can simply invert it, except for
3205 floating-point non-equality comparisons, in which case we just
3206 enclose a TRUTH_NOT_EXPR around what we have. */
3207
3208 if (TREE_CODE_CLASS (code) == tcc_comparison)
3209 {
3210 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3211 if (FLOAT_TYPE_P (op_type)
3212 && flag_trapping_math
3213 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3214 && code != NE_EXPR && code != EQ_EXPR)
3215 return NULL_TREE;
3216 else
3217 {
3218 code = invert_tree_comparison (code,
3219 HONOR_NANS (TYPE_MODE (op_type)));
3220 if (code == ERROR_MARK)
3221 return NULL_TREE;
3222 else
3223 return build2 (code, type,
3224 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3225 }
3226 }
3227
3228 switch (code)
3229 {
3230 case INTEGER_CST:
3231 return constant_boolean_node (integer_zerop (arg), type);
3232
3233 case TRUTH_AND_EXPR:
3234 return build2 (TRUTH_OR_EXPR, type,
3235 invert_truthvalue (TREE_OPERAND (arg, 0)),
3236 invert_truthvalue (TREE_OPERAND (arg, 1)));
3237
3238 case TRUTH_OR_EXPR:
3239 return build2 (TRUTH_AND_EXPR, type,
3240 invert_truthvalue (TREE_OPERAND (arg, 0)),
3241 invert_truthvalue (TREE_OPERAND (arg, 1)));
3242
3243 case TRUTH_XOR_EXPR:
3244 /* Here we can invert either operand. We invert the first operand
3245 unless the second operand is a TRUTH_NOT_EXPR in which case our
3246 result is the XOR of the first operand with the inside of the
3247 negation of the second operand. */
3248
3249 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3250 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3251 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3252 else
3253 return build2 (TRUTH_XOR_EXPR, type,
3254 invert_truthvalue (TREE_OPERAND (arg, 0)),
3255 TREE_OPERAND (arg, 1));
3256
3257 case TRUTH_ANDIF_EXPR:
3258 return build2 (TRUTH_ORIF_EXPR, type,
3259 invert_truthvalue (TREE_OPERAND (arg, 0)),
3260 invert_truthvalue (TREE_OPERAND (arg, 1)));
3261
3262 case TRUTH_ORIF_EXPR:
3263 return build2 (TRUTH_ANDIF_EXPR, type,
3264 invert_truthvalue (TREE_OPERAND (arg, 0)),
3265 invert_truthvalue (TREE_OPERAND (arg, 1)));
3266
3267 case TRUTH_NOT_EXPR:
3268 return TREE_OPERAND (arg, 0);
3269
3270 case COND_EXPR:
3271 {
3272 tree arg1 = TREE_OPERAND (arg, 1);
3273 tree arg2 = TREE_OPERAND (arg, 2);
3274 /* A COND_EXPR may have a throw as one operand, which
3275 then has void type. Just leave void operands
3276 as they are. */
3277 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3278 VOID_TYPE_P (TREE_TYPE (arg1))
3279 ? arg1 : invert_truthvalue (arg1),
3280 VOID_TYPE_P (TREE_TYPE (arg2))
3281 ? arg2 : invert_truthvalue (arg2));
3282 }
3283
3284 case COMPOUND_EXPR:
3285 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3286 invert_truthvalue (TREE_OPERAND (arg, 1)));
3287
3288 case NON_LVALUE_EXPR:
3289 return invert_truthvalue (TREE_OPERAND (arg, 0));
3290
3291 case NOP_EXPR:
3292 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3293 return build1 (TRUTH_NOT_EXPR, type, arg);
3294
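      /* FALLTHROUGH */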
3295 case CONVERT_EXPR:
3296 case FLOAT_EXPR:
3297 return build1 (TREE_CODE (arg), type,
3298 invert_truthvalue (TREE_OPERAND (arg, 0)));
3299
3300 case BIT_AND_EXPR:
3301 if (!integer_onep (TREE_OPERAND (arg, 1)))
3302 break;
3303 return build2 (EQ_EXPR, type, arg,
3304 build_int_cst (type, 0));
3305
3306 case SAVE_EXPR:
3307 return build1 (TRUTH_NOT_EXPR, type, arg);
3308
3309 case CLEANUP_POINT_EXPR:
3310 return build1 (CLEANUP_POINT_EXPR, type,
3311 invert_truthvalue (TREE_OPERAND (arg, 0)));
3312
3313 default:
3314 break;
3315 }
3316
3317 return NULL_TREE;
3318}
3319
3320/* Return a simplified tree node for the truth-negation of ARG. This
3321 never alters ARG itself. We assume that ARG is an operation that
3322 returns a truth value (0 or 1).
3323
3324 FIXME: one would think we would fold the result, but it causes
3325 problems with the dominator optimizer. */
3326
3327tree
3328invert_truthvalue (tree arg)
3329{
3330 tree tem;
3331
3332 if (TREE_CODE (arg) == ERROR_MARK)
3333 return arg;
3334
3335 tem = fold_truth_not_expr (arg);
3336 if (!tem)
3337 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338
3339 return tem;
3340}
3341
3342/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3343 operands are another bit-wise operation with a common input. If so,
3344 distribute the bit operations to save an operation and possibly two if
3345 constants are involved. For example, convert
3346 (A | B) & (A | C) into A | (B & C)
3347 Further simplification will occur if B and C are constants.
3348
3349 If this optimization cannot be done, 0 will be returned. */
3350
3351static tree
3352distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353{
3354 tree common;
3355 tree left, right;
3356
3357 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3358 || TREE_CODE (arg0) == code
3359 || (TREE_CODE (arg0) != BIT_AND_EXPR
3360 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3361 return 0;
3362
3363 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364 {
3365 common = TREE_OPERAND (arg0, 0);
3366 left = TREE_OPERAND (arg0, 1);
3367 right = TREE_OPERAND (arg1, 1);
3368 }
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370 {
3371 common = TREE_OPERAND (arg0, 0);
3372 left = TREE_OPERAND (arg0, 1);
3373 right = TREE_OPERAND (arg1, 0);
3374 }
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376 {
3377 common = TREE_OPERAND (arg0, 1);
3378 left = TREE_OPERAND (arg0, 0);
3379 right = TREE_OPERAND (arg1, 1);
3380 }
3381 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382 {
3383 common = TREE_OPERAND (arg0, 1);
3384 left = TREE_OPERAND (arg0, 0);
3385 right = TREE_OPERAND (arg1, 0);
3386 }
3387 else
3388 return 0;
3389
3390 return fold_build2 (TREE_CODE (arg0), type, common,
3391 fold_build2 (code, type, left, right));
3392}
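
/* For example, "(x | 3) & (x | 5)" becomes "x | (3 & 5)", which folds
   further to "x | 1".  */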
3393
3394/* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify a binary operation
3395 with code CODE. This optimization is unsafe. */
3396static tree
3397distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398{
3399 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3400 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401
3402 /* (A / C) +- (B / C) -> (A +- B) / C. */
3403 if (mul0 == mul1
3404 && operand_equal_p (TREE_OPERAND (arg0, 1),
3405 TREE_OPERAND (arg1, 1), 0))
3406 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3407 fold_build2 (code, type,
3408 TREE_OPERAND (arg0, 0),
3409 TREE_OPERAND (arg1, 0)),
3410 TREE_OPERAND (arg0, 1));
3411
3412 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3413 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3414 TREE_OPERAND (arg1, 0), 0)
3415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3416 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417 {
3418 REAL_VALUE_TYPE r0, r1;
3419 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3420 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3421 if (!mul0)
3422 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3423 if (!mul1)
3424 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3425 real_arithmetic (&r0, code, &r0, &r1);
3426 return fold_build2 (MULT_EXPR, type,
3427 TREE_OPERAND (arg0, 0),
3428 build_real (type, r0));
3429 }
3430
3431 return NULL_TREE;
3432}
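
/* For example, "a / 2.0 + b / 2.0" becomes "(a + b) / 2.0", and
   "x / 2.0 + x / 4.0" becomes "x * 0.75".  Both rewrites can change
   rounding and exception behavior, hence the "unsafe" caveat above.  */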
3433
3434/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3435 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3436
3437static tree
3438make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3439 int unsignedp)
3440{
3441 tree result;
3442
3443 if (bitpos == 0)
3444 {
3445 tree size = TYPE_SIZE (TREE_TYPE (inner));
3446 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3447 || POINTER_TYPE_P (TREE_TYPE (inner)))
3448 && host_integerp (size, 0)
3449 && tree_low_cst (size, 0) == bitsize)
3450 return fold_convert (type, inner);
3451 }
3452
3453 result = build3 (BIT_FIELD_REF, type, inner,
3454 size_int (bitsize), bitsize_int (bitpos));
3455
3456 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457
3458 return result;
3459}
3460
3461/* Optimize a bit-field compare.
3462
3463 There are two cases: First is a compare against a constant and the
3464 second is a comparison of two items where the fields are at the same
3465 bit position relative to the start of a chunk (byte, halfword, word)
3466 large enough to contain it. In these cases we can avoid the shift
3467 implicit in bitfield extractions.
3468
3469 For constants, we emit a compare of the shifted constant with the
3470 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471 compared. For two fields at the same position, we do the ANDs with the
3472 similar mask and compare the result of the ANDs.
3473
3474 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476 are the left and right operands of the comparison, respectively.
3477
3478 If the optimization described above can be done, we return the resulting
3479 tree. Otherwise we return zero. */
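
/* For example, given "struct S { unsigned f : 3; } s;" with f
   occupying bits 8-10 of a 32-bit word W on a little-endian target,
   "s.f == 5" can be rewritten as "(W & 0x700) == 0x500", avoiding the
   shift a full field extraction would need.  */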
3480
3481static tree
3482optimize_bit_field_compare (enum tree_code code, tree compare_type,
3483 tree lhs, tree rhs)
3484{
3485 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486 tree type = TREE_TYPE (lhs);
3487 tree signed_type, unsigned_type;
3488 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489 enum machine_mode lmode, rmode, nmode;
3490 int lunsignedp, runsignedp;
3491 int lvolatilep = 0, rvolatilep = 0;
3492 tree linner, rinner = NULL_TREE;
3493 tree mask;
3494 tree offset;
3495
3496 /* Get all the information about the extractions being done. If the bit size
3497	     is the same as the size of the underlying object, we aren't doing an
3498 extraction at all and so can do nothing. We also don't want to
3499 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500 then will no longer be able to replace it. */
3501 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 &lunsignedp, &lvolatilep, false);
3503 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3505 return 0;
3506
3507 if (!const_p)
3508 {
3509 /* If this is not a constant, we can only do something if bit positions,
3510 sizes, and signedness are the same. */
3511 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 &runsignedp, &rvolatilep, false);
3513
3514 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 || lunsignedp != runsignedp || offset != 0
3516 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3517 return 0;
3518 }
3519
3520 /* See if we can find a mode to refer to this field. We should be able to,
3521 but fail if we can't. */
3522 nmode = get_best_mode (lbitsize, lbitpos,
3523 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 TYPE_ALIGN (TREE_TYPE (rinner))),
3526 word_mode, lvolatilep || rvolatilep);
3527 if (nmode == VOIDmode)
3528 return 0;
3529
3530 /* Set signed and unsigned types of the precision of this mode for the
3531 shifts below. */
3532 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534
3535 /* Compute the bit position and size for the new reference and our offset
3536 within it. If the new reference is the same size as the original, we
3537 won't optimize anything, so return zero. */
3538 nbitsize = GET_MODE_BITSIZE (nmode);
3539 nbitpos = lbitpos & ~ (nbitsize - 1);
3540 lbitpos -= nbitpos;
3541 if (nbitsize == lbitsize)
3542 return 0;
3543
3544 if (BYTES_BIG_ENDIAN)
3545 lbitpos = nbitsize - lbitsize - lbitpos;
3546
3547 /* Make the mask to be used against the extracted field. */
3548 mask = build_int_cst (unsigned_type, -1);
3549 mask = force_fit_type (mask, 0, false, false);
3550 mask = fold_convert (unsigned_type, mask);
3551 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3552 mask = const_binop (RSHIFT_EXPR, mask,
3553 size_int (nbitsize - lbitsize - lbitpos), 0);
3554
3555 if (! const_p)
3556	    /* If not comparing with a constant, just rework the comparison
3557 and return. */
3558 return build2 (code, compare_type,
3559 build2 (BIT_AND_EXPR, unsigned_type,
3560 make_bit_field_ref (linner, unsigned_type,
3561 nbitsize, nbitpos, 1),
3562 mask),
3563 build2 (BIT_AND_EXPR, unsigned_type,
3564 make_bit_field_ref (rinner, unsigned_type,
3565 nbitsize, nbitpos, 1),
3566 mask));
3567
3568 /* Otherwise, we are handling the constant case. See if the constant is too
3569	     big for the field.  Warn and return a tree for 0 (false) if so.  We do
3570 this not only for its own sake, but to avoid having to test for this
3571 error case below. If we didn't, we might generate wrong code.
3572
3573 For unsigned fields, the constant shifted right by the field length should
3574 be all zero. For signed fields, the high-order bits should agree with
3575 the sign bit. */
3576
3577 if (lunsignedp)
3578 {
3579 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580 fold_convert (unsigned_type, rhs),
3581 size_int (lbitsize), 0)))
3582 {
3583 warning (0, "comparison is always %d due to width of bit-field",
3584 code == NE_EXPR);
3585 return constant_boolean_node (code == NE_EXPR, compare_type);
3586 }
3587 }
3588 else
3589 {
3590 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3591 size_int (lbitsize - 1), 0);
3592 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593 {
3594 warning (0, "comparison is always %d due to width of bit-field",
3595 code == NE_EXPR);
3596 return constant_boolean_node (code == NE_EXPR, compare_type);
3597 }
3598 }
3599
3600 /* Single-bit compares should always be against zero. */
3601 if (lbitsize == 1 && ! integer_zerop (rhs))
3602 {
3603 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3604 rhs = build_int_cst (type, 0);
3605 }
3606
3607	  /* Make a new bit-field reference, shift the constant over the
3608	     appropriate number of bits and mask it with the computed mask
3609	     (in case this was a signed field).  */
3610 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3611 if (lvolatilep)
3612 {
3613 TREE_SIDE_EFFECTS (lhs) = 1;
3614 TREE_THIS_VOLATILE (lhs) = 1;
3615 }
3616
3617 rhs = const_binop (BIT_AND_EXPR,
3618 const_binop (LSHIFT_EXPR,
3619 fold_convert (unsigned_type, rhs),
3620 size_int (lbitpos), 0),
3621 mask, 0);
3622
3623 return build2 (code, compare_type,
3624 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3625 rhs);
3626}
3627
3628/* Subroutine for fold_truthop: decode a field reference.
3629
3630 If EXP is a comparison reference, we return the innermost reference.
3631
3632 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3633 set to the starting bit number.
3634
3635 If the innermost field can be completely contained in a mode-sized
3636 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3637
3638	   *PVOLATILEP is set to 1 if any expression encountered is volatile;
3639 otherwise it is not changed.
3640
3641 *PUNSIGNEDP is set to the signedness of the field.
3642
3643 *PMASK is set to the mask used. This is either contained in a
3644 BIT_AND_EXPR or derived from the width of the field.
3645
3646 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647
3648 Return 0 if this is not a component reference or is one that we can't
3649 do anything with. */
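
/* For example, for EXP = "s.f & 3", where f is a 5-bit unsigned
   bit-field of s, the function returns the underlying object s and
   sets *PBITSIZE to 5, *PAND_MASK to 3, and *PMASK to 3 (the field
   mask 0x1f merged with the BIT_AND mask).  */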
3650
3651static tree
3652decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3653 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3654 int *punsignedp, int *pvolatilep,
3655 tree *pmask, tree *pand_mask)
3656{
3657 tree outer_type = 0;
3658 tree and_mask = 0;
3659 tree mask, inner, offset;
3660 tree unsigned_type;
3661 unsigned int precision;
3662
3663 /* All the optimizations using this function assume integer fields.
3664 There are problems with FP fields since the type_for_size call
3665 below can fail for, e.g., XFmode. */
3666 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3667 return 0;
3668
3669 /* We are interested in the bare arrangement of bits, so strip everything
3670 that doesn't affect the machine mode. However, record the type of the
3671 outermost expression if it may matter below. */
3672 if (TREE_CODE (exp) == NOP_EXPR
3673 || TREE_CODE (exp) == CONVERT_EXPR
3674 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3675 outer_type = TREE_TYPE (exp);
3676 STRIP_NOPS (exp);
3677
3678 if (TREE_CODE (exp) == BIT_AND_EXPR)
3679 {
3680 and_mask = TREE_OPERAND (exp, 1);
3681 exp = TREE_OPERAND (exp, 0);
3682 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3683 if (TREE_CODE (and_mask) != INTEGER_CST)
3684 return 0;
3685 }
3686
3687 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3688 punsignedp, pvolatilep, false);
3689 if ((inner == exp && and_mask == 0)
3690 || *pbitsize < 0 || offset != 0
3691 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3692 return 0;
3693
3694 /* If the number of bits in the reference is the same as the bitsize of
3695 the outer type, then the outer type gives the signedness. Otherwise
3696 (in case of a small bitfield) the signedness is unchanged. */
3697 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3698 *punsignedp = TYPE_UNSIGNED (outer_type);
3699
3700 /* Compute the mask to access the bitfield. */
3701 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3702 precision = TYPE_PRECISION (unsigned_type);
3703
3704 mask = build_int_cst (unsigned_type, -1);
3705 mask = force_fit_type (mask, 0, false, false);
3706
3707 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3708 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709
3710 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3711 if (and_mask != 0)
3712 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3713 fold_convert (unsigned_type, and_mask), mask);
3714
3715 *pmask = mask;
3716 *pand_mask = and_mask;
3717 return inner;
3718}
3719
3720/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3721 bit positions. */
3722
3723static int
3724all_ones_mask_p (tree mask, int size)
3725{
3726 tree type = TREE_TYPE (mask);
3727 unsigned int precision = TYPE_PRECISION (type);
3728 tree tmask;
3729
3730 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3731 tmask = force_fit_type (tmask, 0, false, false);
3732
3733 return
3734 tree_int_cst_equal (mask,
3735 const_binop (RSHIFT_EXPR,
3736 const_binop (LSHIFT_EXPR, tmask,
3737 size_int (precision - size),
3738 0),
3739 size_int (precision - size), 0));
3740}
3741
3742/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3743 represents the sign bit of EXP's type. If EXP represents a sign
3744 or zero extension, also test VAL against the unextended type.
3745 The return value is the (sub)expression whose sign bit is VAL,
3746 or NULL_TREE otherwise. */
3747
3748static tree
3749sign_bit_p (tree exp, tree val)
3750{
3751 unsigned HOST_WIDE_INT mask_lo, lo;
3752 HOST_WIDE_INT mask_hi, hi;
3753 int width;
3754 tree t;
3755
3756 /* Tree EXP must have an integral type. */
3757 t = TREE_TYPE (exp);
3758 if (! INTEGRAL_TYPE_P (t))
3759 return NULL_TREE;
3760
3761 /* Tree VAL must be an integer constant. */
3762 if (TREE_CODE (val) != INTEGER_CST
3763 || TREE_CONSTANT_OVERFLOW (val))
3764 return NULL_TREE;
3765
3766 width = TYPE_PRECISION (t);
3767 if (width > HOST_BITS_PER_WIDE_INT)
3768 {
3769 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3770 lo = 0;
3771
3772 mask_hi = ((unsigned HOST_WIDE_INT) -1
3773 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3774 mask_lo = -1;
3775 }
3776 else
3777 {
3778 hi = 0;
3779 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780
3781 mask_hi = 0;
3782 mask_lo = ((unsigned HOST_WIDE_INT) -1
3783 >> (HOST_BITS_PER_WIDE_INT - width));
3784 }
3785
3786 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3787 treat VAL as if it were unsigned. */
3788 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3789 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3790 return exp;
3791
3792 /* Handle extension from a narrower type. */
3793 if (TREE_CODE (exp) == NOP_EXPR
3794 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3795 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796
3797 return NULL_TREE;
3798}
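
/* For example, if EXP has 32-bit int type, sign_bit_p returns EXP only
   when VAL is 0x80000000 (INT_MIN); if EXP is "(int) c" with c of a
   narrower type, VAL is also tested against the sign bit of c's
   type.  */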
3799
3800/* Subroutine for fold_truthop: determine if an operand is simple enough
3801 to be evaluated unconditionally. */
3802
3803static int
3804simple_operand_p (tree exp)
3805{
3806 /* Strip any conversions that don't change the machine mode. */
3807 STRIP_NOPS (exp);
3808
3809 return (CONSTANT_CLASS_P (exp)
3810 || TREE_CODE (exp) == SSA_NAME
3811 || (DECL_P (exp)
3812 && ! TREE_ADDRESSABLE (exp)
3813 && ! TREE_THIS_VOLATILE (exp)
3814 && ! DECL_NONLOCAL (exp)
3815 /* Don't regard global variables as simple. They may be
3816 allocated in ways unknown to the compiler (shared memory,
3817 #pragma weak, etc). */
3818 && ! TREE_PUBLIC (exp)
3819 && ! DECL_EXTERNAL (exp)
3820 /* Loading a static variable is unduly expensive, but global
3821 registers aren't expensive. */
3822 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3823}
3824
3825/* The following functions are subroutines to fold_range_test and allow it to
3826 try to change a logical combination of comparisons into a range test.
3827
3828 For example, both
3829 X == 2 || X == 3 || X == 4 || X == 5
3830 and
3831 X >= 2 && X <= 5
3832 are converted to
3833 (unsigned) (X - 2) <= 3
3834
3835 We describe each set of comparisons as being either inside or outside
3836 a range, using a variable named like IN_P, and then describe the
3837 range with a lower and upper bound. If one of the bounds is omitted,
3838 it represents either the highest or lowest value of the type.
3839
3840 In the comments below, we represent a range by two numbers in brackets
3841 preceded by a "+" to designate being inside that range, or a "-" to
3842 designate being outside that range, so the condition can be inverted by
3843 flipping the prefix. An omitted bound is represented by a "-". For
3844 example, "- [-, 10]" means being outside the range starting at the lowest
3845 possible value and ending at 10, in other words, being greater than 10.
3846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3847 always false.
3848
3849 We set up things so that the missing bounds are handled in a consistent
3850 manner so neither a missing bound nor "true" and "false" need to be
3851 handled using a special case. */
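
/* As a worked example, "X >= 2 && X <= 5" is the inclusive range
   "+ [2, 5]", and "X == 2 || X == 3 || X == 4 || X == 5" merges
   pairwise to the same range; either form is then emitted as the
   single test "(unsigned) (X - 2) <= 3".  */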
3852
3853/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3855 and UPPER1_P are nonzero if the respective argument is an upper bound
3856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3857 must be specified for a comparison. ARG1 will be converted to ARG0's
3858 type if both are specified. */
3859
3860static tree
3861range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3862 tree arg1, int upper1_p)
3863{
3864 tree tem;
3865 int result;
3866 int sgn0, sgn1;
3867
3868 /* If neither arg represents infinity, do the normal operation.
3869 Else, if not a comparison, return infinity. Else handle the special
3870 comparison rules. Note that most of the cases below won't occur, but
3871 are handled for consistency. */
3872
3873 if (arg0 != 0 && arg1 != 0)
3874 {
3875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3877 STRIP_NOPS (tem);
3878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3879 }
3880
3881 if (TREE_CODE_CLASS (code) != tcc_comparison)
3882 return 0;
3883
3884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3885	     for neither.  In real mathematics, we cannot assume open-ended ranges
3886	     are the same.  But this is computer arithmetic, where numbers are finite.
3887	     We can therefore treat a missing upper bound as the value Z and a missing
3888	     lower bound as -Z, Z being greater than any representable number.  This permits
3889 us to treat unbounded ranges as equal. */
3890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3892 switch (code)
3893 {
3894 case EQ_EXPR:
3895 result = sgn0 == sgn1;
3896 break;
3897 case NE_EXPR:
3898 result = sgn0 != sgn1;
3899 break;
3900 case LT_EXPR:
3901 result = sgn0 < sgn1;
3902 break;
3903 case LE_EXPR:
3904 result = sgn0 <= sgn1;
3905 break;
3906 case GT_EXPR:
3907 result = sgn0 > sgn1;
3908 break;
3909 case GE_EXPR:
3910 result = sgn0 >= sgn1;
3911 break;
3912 default:
3913 gcc_unreachable ();
3914 }
3915
3916 return constant_boolean_node (result, type);
3917}
3918
3919/* Given EXP, a logical expression, set the range it is testing into
3920 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3921 actually being tested. *PLOW and *PHIGH will be made of the same
3922 type as the returned expression. If EXP is not a comparison, we
3923 will most likely not be returning a useful value and range. Set
3924 *STRICT_OVERFLOW_P to true if the return value is only valid
3925 because signed overflow is undefined; otherwise, do not change
3926 *STRICT_OVERFLOW_P. */
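
/* For example, for EXP = "X + 1 > 10" with X a signed int, the
   comparison first gives the range "- [-, 10]" for "X + 1"; the
   PLUS_EXPR case then adjusts the bound to "- [-, 9]" for X itself,
   i.e. "X > 9", and sets *STRICT_OVERFLOW_P because the step relies
   on signed overflow being undefined.  */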
3927
3928static tree
3929make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3930 bool *strict_overflow_p)
3931{
3932 enum tree_code code;
3933 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3934 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3935 int in_p, n_in_p;
3936 tree low, high, n_low, n_high;
3937
3938 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3939 and see if we can refine the range. Some of the cases below may not
3940 happen, but it doesn't seem worth worrying about this. We "continue"
3941 the outer loop when we've changed something; otherwise we "break"
3942 the switch, which will "break" the while. */
3943
3944 in_p = 0;
3945 low = high = build_int_cst (TREE_TYPE (exp), 0);
3946
3947 while (1)
3948 {
3949 code = TREE_CODE (exp);
3950 exp_type = TREE_TYPE (exp);
3951
3952 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953 {
3954 if (TREE_CODE_LENGTH (code) > 0)
3955 arg0 = TREE_OPERAND (exp, 0);
3956 if (TREE_CODE_CLASS (code) == tcc_comparison
3957 || TREE_CODE_CLASS (code) == tcc_unary
3958 || TREE_CODE_CLASS (code) == tcc_binary)
3959 arg0_type = TREE_TYPE (arg0);
3960 if (TREE_CODE_CLASS (code) == tcc_binary
3961 || TREE_CODE_CLASS (code) == tcc_comparison
3962 || (TREE_CODE_CLASS (code) == tcc_expression
3963 && TREE_CODE_LENGTH (code) > 1))
3964 arg1 = TREE_OPERAND (exp, 1);
3965 }
3966
3967 switch (code)
3968 {
3969 case TRUTH_NOT_EXPR:
3970 in_p = ! in_p, exp = arg0;
3971 continue;
3972
3973 case EQ_EXPR: case NE_EXPR:
3974 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 /* We can only do something if the range is testing for zero
3976 and if the second operand is an integer constant. Note that
3977 saying something is "in" the range we make is done by
3978 complementing IN_P since it will set in the initial case of
3979 being not equal to zero; "out" is leaving it alone. */
3980 if (low == 0 || high == 0
3981 || ! integer_zerop (low) || ! integer_zerop (high)
3982 || TREE_CODE (arg1) != INTEGER_CST)
3983 break;
3984
3985 switch (code)
3986 {
3987 case NE_EXPR: /* - [c, c] */
3988 low = high = arg1;
3989 break;
3990 case EQ_EXPR: /* + [c, c] */
3991 in_p = ! in_p, low = high = arg1;
3992 break;
3993 case GT_EXPR: /* - [-, c] */
3994 low = 0, high = arg1;
3995 break;
3996 case GE_EXPR: /* + [c, -] */
3997 in_p = ! in_p, low = arg1, high = 0;
3998 break;
3999 case LT_EXPR: /* - [c, -] */
4000 low = arg1, high = 0;
4001 break;
4002 case LE_EXPR: /* + [-, c] */
4003 in_p = ! in_p, low = 0, high = arg1;
4004 break;
4005 default:
4006 gcc_unreachable ();
4007 }
4008
4009 /* If this is an unsigned comparison, we also know that EXP is
4010 greater than or equal to zero. We base the range tests we make
4011 on that fact, so we record it here so we can parse existing
4012 range tests. We test arg0_type since often the return type
4013 of, e.g. EQ_EXPR, is boolean. */
4014 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 {
4016 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 in_p, low, high, 1,
4018 build_int_cst (arg0_type, 0),
4019 NULL_TREE))
4020 break;
4021
4022 in_p = n_in_p, low = n_low, high = n_high;
4023
4024 /* If the high bound is missing, but we have a nonzero low
4025 bound, reverse the range so it goes from zero to the low bound
4026 minus 1. */
4027 if (high == 0 && low && ! integer_zerop (low))
4028 {
4029 in_p = ! in_p;
4030 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 integer_one_node, 0);
4032 low = build_int_cst (arg0_type, 0);
4033 }
4034 }
4035
4036 exp = arg0;
4037 continue;
4038
4039 case NEGATE_EXPR:
4040 /* (-x) IN [a,b] -> x in [-b, -a] */
4041 n_low = range_binop (MINUS_EXPR, exp_type,
4042 build_int_cst (exp_type, 0),
4043 0, high, 1);
4044 n_high = range_binop (MINUS_EXPR, exp_type,
4045 build_int_cst (exp_type, 0),
4046 0, low, 0);
4047 low = n_low, high = n_high;
4048 exp = arg0;
4049 continue;
4050
4051 case BIT_NOT_EXPR:
4052 /* ~ X -> -X - 1 */
4053 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4054 build_int_cst (exp_type, 1));
4055 continue;
4056
4057 case PLUS_EXPR: case MINUS_EXPR:
4058 if (TREE_CODE (arg1) != INTEGER_CST)
4059 break;
4060
4061 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4062 move a constant to the other side. */
4063 if (!TYPE_UNSIGNED (arg0_type)
4064 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065 break;
4066
4067 /* If EXP is signed, any overflow in the computation is undefined,
4068 so we don't worry about it so long as our computations on
4069 the bounds don't overflow. For unsigned, overflow is defined
4070 and this is exactly the right thing. */
4071 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072 arg0_type, low, 0, arg1, 0);
4073 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 arg0_type, high, 1, arg1, 0);
4075 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4076 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4077 break;
4078
4079 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4080 *strict_overflow_p = true;
4081
4082 /* Check for an unsigned range which has wrapped around the maximum
4083 value thus making n_high < n_low, and normalize it. */
4084 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085 {
4086 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4087 integer_one_node, 0);
4088 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4089 integer_one_node, 0);
4090
4091 /* If the range is of the form +/- [ x+1, x ], we won't
4092 be able to normalize it. But then, it represents the
4093 whole range or the empty set, so make it
4094 +/- [ -, - ]. */
4095 if (tree_int_cst_equal (n_low, low)
4096 && tree_int_cst_equal (n_high, high))
4097 low = high = 0;
4098 else
4099 in_p = ! in_p;
4100 }
4101 else
4102 low = n_low, high = n_high;
4103
4104 exp = arg0;
4105 continue;
4106
4107 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4108 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4109 break;
4110
4111 if (! INTEGRAL_TYPE_P (arg0_type)
4112 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4113 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4114 break;
4115
4116 n_low = low, n_high = high;
4117
4118 if (n_low != 0)
4119 n_low = fold_convert (arg0_type, n_low);
4120
4121 if (n_high != 0)
4122 n_high = fold_convert (arg0_type, n_high);
4123
4124
4125	  /* If we're converting arg0 from an unsigned type to exp's
4126	     signed type, we will be doing the comparison as unsigned.
4127 The tests above have already verified that LOW and HIGH
4128 are both positive.
4129
4130 So we have to ensure that we will handle large unsigned
4131 values the same way that the current signed bounds treat
4132 negative values. */
4133
4134 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135 {
4136 tree high_positive;
4137 tree equiv_type = lang_hooks.types.type_for_mode
4138 (TYPE_MODE (arg0_type), 1);
4139
4140 /* A range without an upper bound is, naturally, unbounded.
4141 Since convert would have cropped a very large value, use
4142 the max value for the destination type. */
4143 high_positive
4144 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4145 : TYPE_MAX_VALUE (arg0_type);
4146
4147 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4148 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4149 fold_convert (arg0_type,
4150 high_positive),
4151 fold_convert (arg0_type,
4152 integer_one_node));
4153
4154 /* If the low bound is specified, "and" the range with the
4155 range for which the original unsigned value will be
4156 positive. */
4157 if (low != 0)
4158 {
4159 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4160 1, n_low, n_high, 1,
4161 fold_convert (arg0_type,
4162 integer_zero_node),
4163 high_positive))
4164 break;
4165
4166 in_p = (n_in_p == in_p);
4167 }
4168 else
4169 {
4170 /* Otherwise, "or" the range with the range of the input
4171 that will be interpreted as negative. */
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4173 0, n_low, n_high, 1,
4174 fold_convert (arg0_type,
4175 integer_zero_node),
4176 high_positive))
4177 break;
4178
4179 in_p = (in_p != n_in_p);
4180 }
4181 }
4182
4183 exp = arg0;
4184 low = n_low, high = n_high;
4185 continue;
4186
4187 default:
4188 break;
4189 }
4190
4191 break;
4192 }
4193
4194 /* If EXP is a constant, we can evaluate whether this is true or false. */
4195 if (TREE_CODE (exp) == INTEGER_CST)
4196 {
4197 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4198 exp, 0, low, 0))
4199 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4200 exp, 1, high, 1)));
4201 low = high = 0;
4202 exp = 0;
4203 }
4204
4205 *pin_p = in_p, *plow = low, *phigh = high;
4206 return exp;
4207}
4208
4209/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4210 type, TYPE, return an expression to test if EXP is in (or out of, depending
4211 on IN_P) the range. Return 0 if the test couldn't be created. */
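
/* For example, the range "+ [2, 5]" over an int EXP becomes
   "(unsigned int) EXP - 2 <= 3", while "+ [5, 5]" becomes simply
   "EXP == 5".  */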
4212
4213static tree
4214build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215{
4216 tree etype = TREE_TYPE (exp);
4217 tree value;
4218
4219#ifdef HAVE_canonicalize_funcptr_for_compare
4220 /* Disable this optimization for function pointer expressions
4221 on targets that require function pointer canonicalization. */
4222 if (HAVE_canonicalize_funcptr_for_compare
4223 && TREE_CODE (etype) == POINTER_TYPE
4224 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4225 return NULL_TREE;
4226#endif
4227
4228 if (! in_p)
4229 {
4230 value = build_range_check (type, exp, 1, low, high);
4231 if (value != 0)
4232 return invert_truthvalue (value);
4233
4234 return 0;
4235 }
4236
4237 if (low == 0 && high == 0)
4238 return build_int_cst (type, 1);
4239
4240 if (low == 0)
4241 return fold_build2 (LE_EXPR, type, exp,
4242 fold_convert (etype, high));
4243
4244 if (high == 0)
4245 return fold_build2 (GE_EXPR, type, exp,
4246 fold_convert (etype, low));
4247
4248 if (operand_equal_p (low, high, 0))
4249 return fold_build2 (EQ_EXPR, type, exp,
4250 fold_convert (etype, low));
4251
4252 if (integer_zerop (low))
4253 {
4254 if (! TYPE_UNSIGNED (etype))
4255 {
4256 etype = lang_hooks.types.unsigned_type (etype);
4257 high = fold_convert (etype, high);
4258 exp = fold_convert (etype, exp);
4259 }
4260 return build_range_check (type, exp, 1, 0, high);
4261 }
4262
4263 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4264 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265 {
4266 unsigned HOST_WIDE_INT lo;
4267 HOST_WIDE_INT hi;
4268 int prec;
4269
4270 prec = TYPE_PRECISION (etype);
4271 if (prec <= HOST_BITS_PER_WIDE_INT)
4272 {
4273 hi = 0;
4274 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 }
4276 else
4277 {
4278 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4279 lo = (unsigned HOST_WIDE_INT) -1;
4280 }
4281
4282 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 {
4284 if (TYPE_UNSIGNED (etype))
4285 {
4286 etype = lang_hooks.types.signed_type (etype);
4287 exp = fold_convert (etype, exp);
4288 }
4289 return fold_build2 (GT_EXPR, type, exp,
4290 build_int_cst (etype, 0));
4291 }
4292 }
4293
4294 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295	     This requires wrap-around arithmetic for the type of the expression.  */
4296 switch (TREE_CODE (etype))
4297 {
4298 case INTEGER_TYPE:
4299 /* There is no requirement that LOW be within the range of ETYPE
4300 if the latter is a subtype. It must, however, be within the base
4301 type of ETYPE. So be sure we do the subtraction in that type. */
4302 if (TREE_TYPE (etype))
4303 etype = TREE_TYPE (etype);
4304 break;
4305
4306 case ENUMERAL_TYPE:
4307 case BOOLEAN_TYPE:
4308 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4309 TYPE_UNSIGNED (etype));
4310 break;
4311
4312 default:
4313 break;
4314 }
4315
4316	  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
4317 if (TREE_CODE (etype) == INTEGER_TYPE
4318 && !TYPE_OVERFLOW_WRAPS (etype))
4319 {
4320 tree utype, minv, maxv;
4321
4322 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4323 for the type in question, as we rely on this here. */
4324 utype = lang_hooks.types.unsigned_type (etype);
4325 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4326 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4327 integer_one_node, 1);
4328 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329
4330 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4331 minv, 1, maxv, 1)))
4332 etype = utype;
4333 else
4334 return 0;
4335 }
4336
4337 high = fold_convert (etype, high);
4338 low = fold_convert (etype, low);
4339 exp = fold_convert (etype, exp);
4340
4341 value = const_binop (MINUS_EXPR, high, low, 0);
4342
4343 if (value != 0 && !TREE_OVERFLOW (value))
4344 return build_range_check (type,
4345 fold_build2 (MINUS_EXPR, etype, exp, low),
4346 1, build_int_cst (etype, 0), value);
4347
4348 return 0;
4349}
4350
4351/* Return the predecessor of VAL in its type, handling the infinite case. */
4352
4353static tree
4354range_predecessor (tree val)
4355{
4356 tree type = TREE_TYPE (val);
4357
4358 if (INTEGRAL_TYPE_P (type)
4359 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4360 return 0;
4361 else
4362 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4363}
4364
4365/* Return the successor of VAL in its type, handling the infinite case. */
4366
4367static tree
4368range_successor (tree val)
4369{
4370 tree type = TREE_TYPE (val);
4371
4372 if (INTEGRAL_TYPE_P (type)
4373 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4374 return 0;
4375 else
4376 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4377}
4378
4379/* Given two ranges, see if we can merge them into one. Return 1 if we
4380 can, 0 if we can't. Set the output range into the specified parameters. */
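
/* For example, merging "+ [2, 5]" with "+ [4, 9]" gives the
   intersection "+ [4, 5]", while merging "+ [2, 5]" with "- [4, 9]"
   gives "+ [2, 3]".  */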
4381
4382static int
4383merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384 tree high0, int in1_p, tree low1, tree high1)
4385{
4386 int no_overlap;
4387 int subset;
4388 int temp;
4389 tree tem;
4390 int in_p;
4391 tree low, high;
4392 int lowequal = ((low0 == 0 && low1 == 0)
4393 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 low0, 0, low1, 0)));
4395 int highequal = ((high0 == 0 && high1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 high0, 1, high1, 1)));
4398
4399 /* Make range 0 be the range that starts first, or ends last if they
4400 start at the same value. Swap them if it isn't. */
4401 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 low0, 0, low1, 0))
4403 || (lowequal
4404 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 high1, 1, high0, 1))))
4406 {
4407 temp = in0_p, in0_p = in1_p, in1_p = temp;
4408 tem = low0, low0 = low1, low1 = tem;
4409 tem = high0, high0 = high1, high1 = tem;
4410 }
4411
4412 /* Now flag two cases, whether the ranges are disjoint or whether the
4413 second range is totally subsumed in the first. Note that the tests
4414 below are simplified by the ones above. */
4415 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416 high0, 1, low1, 0));
4417 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418 high1, 1, high0, 1));
4419
4420 /* We now have four cases, depending on whether we are including or
4421 excluding the two ranges. */
4422 if (in0_p && in1_p)
4423 {
4424 /* If they don't overlap, the result is false. If the second range
4425 is a subset it is the result. Otherwise, the range is from the start
4426 of the second to the end of the first. */
4427 if (no_overlap)
4428 in_p = 0, low = high = 0;
4429 else if (subset)
4430 in_p = 1, low = low1, high = high1;
4431 else
4432 in_p = 1, low = low1, high = high0;
4433 }
4434
4435 else if (in0_p && ! in1_p)
4436 {
4437 /* If they don't overlap, the result is the first range. If they are
4438 equal, the result is false. If the second range is a subset of the
4439 first, and the ranges begin at the same place, we go from just after
4440 the end of the second range to the end of the first. If the second
4441 range is not a subset of the first, or if it is a subset and both
4442 ranges end at the same place, the range starts at the start of the
4443 first range and ends just before the second range.
4444 Otherwise, we can't describe this as a single range. */
4445 if (no_overlap)
4446 in_p = 1, low = low0, high = high0;
4447 else if (lowequal && highequal)
4448 in_p = 0, low = high = 0;
4449 else if (subset && lowequal)
4450 {
4451 low = range_successor (high1);
4452 high = high0;
4453 in_p = 1;
4454 if (low == 0)
4455 {
4456 /* We are in the weird situation where high0 > high1 but
4457 high1 has no successor. Punt. */
4458 return 0;
4459 }
4460 }
4461 else if (! subset || highequal)
4462 {
4463 low = low0;
4464 high = range_predecessor (low1);
4465 in_p = 1;
4466 if (high == 0)
4467 {
4468 /* low0 < low1 but low1 has no predecessor. Punt. */
4469 return 0;
4470 }
4471 }
4472 else
4473 return 0;
4474 }
4475
4476 else if (! in0_p && in1_p)
4477 {
4478 /* If they don't overlap, the result is the second range. If the second
4479 is a subset of the first, the result is false. Otherwise,
4480 the range starts just after the first range and ends at the
4481 end of the second. */
4482 if (no_overlap)
4483 in_p = 1, low = low1, high = high1;
4484 else if (subset || highequal)
4485 in_p = 0, low = high = 0;
4486 else
4487 {
4488 low = range_successor (high0);
4489 high = high1;
4490 in_p = 1;
4491 if (low == 0)
4492 {
4493 /* high1 > high0 but high0 has no successor. Punt. */
4494 return 0;
4495 }
4496 }
4497 }
4498
4499 else
4500 {
4501 /* The case where we are excluding both ranges. Here the complex case
4502 is if they don't overlap. In that case, the only time we have a
4503 range is if they are adjacent. If the second is a subset of the
4504 first, the result is the first. Otherwise, the range to exclude
4505 starts at the beginning of the first range and ends at the end of the
4506 second. */
4507 if (no_overlap)
4508 {
4509 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510 range_successor (high0),
4511 1, low1, 0)))
4512 in_p = 0, low = low0, high = high1;
4513 else
4514 {
4515 /* Canonicalize - [min, x] into - [-, x]. */
4516 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4517 switch (TREE_CODE (TREE_TYPE (low0)))
4518 {
4519 case ENUMERAL_TYPE:
4520 if (TYPE_PRECISION (TREE_TYPE (low0))
4521 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4522 break;
4523 /* FALLTHROUGH */
4524 case INTEGER_TYPE:
4525 if (tree_int_cst_equal (low0,
4526 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4527 low0 = 0;
4528 break;
4529 case POINTER_TYPE:
4530 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4531 && integer_zerop (low0))
4532 low0 = 0;
4533 break;
4534 default:
4535 break;
4536 }
4537
4538 /* Canonicalize - [x, max] into - [x, -]. */
4539 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4540 switch (TREE_CODE (TREE_TYPE (high1)))
4541 {
4542 case ENUMERAL_TYPE:
4543 if (TYPE_PRECISION (TREE_TYPE (high1))
4544 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4545 break;
4546 /* FALLTHROUGH */
4547 case INTEGER_TYPE:
4548 if (tree_int_cst_equal (high1,
4549 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4550 high1 = 0;
4551 break;
4552 case POINTER_TYPE:
4553 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4554 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555 high1, 1,
4556 integer_one_node, 1)))
4557 high1 = 0;
4558 break;
4559 default:
4560 break;
4561 }
4562
4563	      /* The ranges might also be adjacent between the maximum and
4564 minimum values of the given type. For
4565 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 return + [x + 1, y - 1]. */
4567 if (low0 == 0 && high1 == 0)
4568 {
4569 low = range_successor (high0);
4570 high = range_predecessor (low1);
4571 if (low == 0 || high == 0)
4572 return 0;
4573
4574 in_p = 1;
4575 }
4576 else
4577 return 0;
4578 }
4579 }
4580 else if (subset)
4581 in_p = 0, low = low0, high = high0;
4582 else
4583 in_p = 0, low = low0, high = high1;
4584 }
4585
4586 *pin_p = in_p, *plow = low, *phigh = high;
4587 return 1;
4588}
4589
4590
4591/* Subroutine of fold, looking inside expressions of the form
4592 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593 of the COND_EXPR. This function is being used also to optimize
4594 A op B ? C : A, by reversing the comparison first.
4595
4596 Return a folded expression whose code is not a COND_EXPR
4597 anymore, or NULL_TREE if no folding opportunity is found. */
4598
4599static tree
4600fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601{
4602 enum tree_code comp_code = TREE_CODE (arg0);
4603 tree arg00 = TREE_OPERAND (arg0, 0);
4604 tree arg01 = TREE_OPERAND (arg0, 1);
4605 tree arg1_type = TREE_TYPE (arg1);
4606 tree tem;
4607
4608 STRIP_NOPS (arg1);
4609 STRIP_NOPS (arg2);
4610
4611 /* If we have A op 0 ? A : -A, consider applying the following
4612 transformations:
4613
4614 A == 0? A : -A same as -A
4615 A != 0? A : -A same as A
4616 A >= 0? A : -A same as abs (A)
4617 A > 0? A : -A same as abs (A)
4618 A <= 0? A : -A same as -abs (A)
4619 A < 0? A : -A same as -abs (A)
4620
4621 None of these transformations work for modes with signed
4622 zeros. If A is +/-0, the first two transformations will
4623 change the sign of the result (from +0 to -0, or vice
4624 versa). The last four will fix the sign of the result,
4625 even though the original expressions could be positive or
4626 negative, depending on the sign of A.
4627
4628 Note that all these transformations are correct if A is
4629 NaN, since the two alternatives (A and -A) are also NaNs. */
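  /* E.g. if A is -0.0, "A == 0 ? A : -A" evaluates to -0.0, whereas
     the simplified "-A" yields +0.0.  */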
4630 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4631 ? real_zerop (arg01)
4632 : integer_zerop (arg01))
4633 && ((TREE_CODE (arg2) == NEGATE_EXPR
4634 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4635 /* In the case that A is of the form X-Y, '-A' (arg2) may
4636	         have already been folded to Y-X; check for that.  */
4637 || (TREE_CODE (arg1) == MINUS_EXPR
4638 && TREE_CODE (arg2) == MINUS_EXPR
4639 && operand_equal_p (TREE_OPERAND (arg1, 0),
4640 TREE_OPERAND (arg2, 1), 0)
4641 && operand_equal_p (TREE_OPERAND (arg1, 1),
4642 TREE_OPERAND (arg2, 0), 0))))
4643 switch (comp_code)
4644 {
4645 case EQ_EXPR:
4646 case UNEQ_EXPR:
4647 tem = fold_convert (arg1_type, arg1);
4648 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4649 case NE_EXPR:
4650 case LTGT_EXPR:
4651 return pedantic_non_lvalue (fold_convert (type, arg1));
4652 case UNGE_EXPR:
4653 case UNGT_EXPR:
4654 if (flag_trapping_math)
4655 break;
4656 /* Fall through. */
4657 case GE_EXPR:
4658 case GT_EXPR:
4659 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660 arg1 = fold_convert (lang_hooks.types.signed_type
4661 (TREE_TYPE (arg1)), arg1);
4662 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4663 return pedantic_non_lvalue (fold_convert (type, tem));
4664 case UNLE_EXPR:
4665 case UNLT_EXPR:
4666 if (flag_trapping_math)
4667 break;
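	/* Fall through.  */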
4668 case LE_EXPR:
4669 case LT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert (lang_hooks.types.signed_type
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return negate_expr (fold_convert (type, tem));
4675 default:
4676 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677 break;
4678 }
4679
4680 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4681 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4682 both transformations are correct when A is NaN: A != 0
4683 is then true, and A == 0 is false. */
4684
4685 if (integer_zerop (arg01) && integer_zerop (arg2))
4686 {
4687 if (comp_code == NE_EXPR)
4688 return pedantic_non_lvalue (fold_convert (type, arg1));
4689 else if (comp_code == EQ_EXPR)
4690 return build_int_cst (type, 0);
4691 }
4692
4693 /* Try some transformations of A op B ? A : B.
4694
4695 A == B? A : B same as B
4696 A != B? A : B same as A
4697 A >= B? A : B same as max (A, B)
4698 A > B? A : B same as max (B, A)
4699 A <= B? A : B same as min (A, B)
4700 A < B? A : B same as min (B, A)
4701
4702 As above, these transformations don't work in the presence
4703 of signed zeros. For example, if A and B are zeros of
4704 opposite sign, the first two transformations will change
4705 the sign of the result. In the last four, the original
4706 expressions give different results for (A=+0, B=-0) and
4707 (A=-0, B=+0), but the transformed expressions do not.
4708
4709 The first two transformations are correct if either A or B
4710 is a NaN. In the first transformation, the condition will
4711 be false, and B will indeed be chosen. In the case of the
4712 second transformation, the condition A != B will be true,
4713 and A will be chosen.
4714
4715 The conversions to max() and min() are not correct if B is
4716 a number and A is not. The conditions in the original
4717 expressions will be false, so all four give B. The min()
4718 and max() versions would give a NaN instead. */
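  /* Concretely: with doubles A = NaN and B = 1.0, "A < B ? A : B"
     evaluates to B, while MIN_EXPR <A, B> could yield NaN; hence the
     !HONOR_NANS guards on the MIN/MAX folds below.  */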
4719 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4720 /* Avoid these transformations if the COND_EXPR may be used
4721 as an lvalue in the C++ front-end. PR c++/19199. */
4722 && (in_gimple_form
4723 || (strcmp (lang_hooks.name, "GNU C++") != 0
4724 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4725 || ! maybe_lvalue_p (arg1)
4726 || ! maybe_lvalue_p (arg2)))
4727 {
4728 tree comp_op0 = arg00;
4729 tree comp_op1 = arg01;
4730 tree comp_type = TREE_TYPE (comp_op0);
4731
4732 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4733 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734 {
4735 comp_type = type;
4736 comp_op0 = arg1;
4737 comp_op1 = arg2;
4738 }
4739
4740 switch (comp_code)
4741 {
4742 case EQ_EXPR:
4743 return pedantic_non_lvalue (fold_convert (type, arg2));
4744 case NE_EXPR:
4745 return pedantic_non_lvalue (fold_convert (type, arg1));
4746 case LE_EXPR:
4747 case LT_EXPR:
4748 case UNLE_EXPR:
4749 case UNLT_EXPR:
4750 /* In C++ a ?: expression can be an lvalue, so put the
4751 operand which will be used if they are equal first
4752 so that we can convert this back to the
4753 corresponding COND_EXPR. */
4754 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 {
4756 comp_op0 = fold_convert (comp_type, comp_op0);
4757 comp_op1 = fold_convert (comp_type, comp_op1);
4758 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4759 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4760 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4761 return pedantic_non_lvalue (fold_convert (type, tem));
4762 }
4763 break;
4764 case GE_EXPR:
4765 case GT_EXPR:
4766 case UNGE_EXPR:
4767 case UNGT_EXPR:
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 {
4770 comp_op0 = fold_convert (comp_type, comp_op0);
4771 comp_op1 = fold_convert (comp_type, comp_op1);
4772 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4773 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4775 return pedantic_non_lvalue (fold_convert (type, tem));
4776 }
4777 break;
4778 case UNEQ_EXPR:
4779 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4780 return pedantic_non_lvalue (fold_convert (type, arg2));
4781 break;
4782 case LTGT_EXPR:
4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 return pedantic_non_lvalue (fold_convert (type, arg1));
4785 break;
4786 default:
4787 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4788 break;
4789 }
4790 }
4791
4792 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4793 we might still be able to simplify this. For example,
4794 if C1 is one less or one more than C2, this might have started
4795 out as a MIN or MAX and been transformed by this function.
4796 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
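  /* For instance, with int A, "A < 5 ? A : 4" has C1 == C2 + 1, so the
     LT_EXPR case below rewrites it as MIN_EXPR <A, 4>.  */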
4797
4798 if (INTEGRAL_TYPE_P (type)
4799 && TREE_CODE (arg01) == INTEGER_CST
4800 && TREE_CODE (arg2) == INTEGER_CST)
4801 switch (comp_code)
4802 {
4803 case EQ_EXPR:
4804 /* We can replace A with C1 in this case. */
4805 arg1 = fold_convert (type, arg01);
4806 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807
4808 case LT_EXPR:
4809 /* If C1 is C2 + 1, this is min(A, C2). */
4810 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4811 OEP_ONLY_CONST)
4812 && operand_equal_p (arg01,
4813 const_binop (PLUS_EXPR, arg2,
4814 integer_one_node, 0),
4815 OEP_ONLY_CONST))
4816 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4817 type, arg1, arg2));
4818 break;
4819
4820 case LE_EXPR:
4821 /* If C1 is C2 - 1, this is min(A, C2). */
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 integer_one_node, 0),
4827 OEP_ONLY_CONST))
4828 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4829 type, arg1, arg2));
4830 break;
4831
4832 case GT_EXPR:
4833 /* If C1 is C2 - 1, this is max(A, C2). */
4834 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4835 OEP_ONLY_CONST)
4836 && operand_equal_p (arg01,
4837 const_binop (MINUS_EXPR, arg2,
4838 integer_one_node, 0),
4839 OEP_ONLY_CONST))
4840 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4841 type, arg1, arg2));
4842 break;
4843
4844 case GE_EXPR:
4845 /* If C1 is C2 + 1, this is max(A, C2). */
4846 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4847 OEP_ONLY_CONST)
4848 && operand_equal_p (arg01,
4849 const_binop (PLUS_EXPR, arg2,
4850 integer_one_node, 0),
4851 OEP_ONLY_CONST))
4852 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4853 type, arg1, arg2));
4854 break;
4855 case NE_EXPR:
4856 break;
4857 default:
4858 gcc_unreachable ();
4859 }
4860
4861 return NULL_TREE;
4862}
4863
4864
4865
4866#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4867#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4868#endif
4869
4870/* EXP is some logical combination of boolean tests. See if we can
4871 merge it into some range test. Return the new tree if so. */
4872
4873static tree
4874fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875{
4876 int or_op = (code == TRUTH_ORIF_EXPR
4877 || code == TRUTH_OR_EXPR);
4878 int in0_p, in1_p, in_p;
4879 tree low0, low1, low, high0, high1, high;
4880 bool strict_overflow_p = false;
4881 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4882 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4883 tree tem;
4884 const char * const warnmsg = G_("assuming signed overflow does not occur "
4885 "when simplifying range test");
4886
4887 /* If this is an OR operation, invert both sides; we will invert
4888 again at the end. */
4889 if (or_op)
4890 in0_p = ! in0_p, in1_p = ! in1_p;
4891
4892   /* If both expressions are the same and we can both merge the ranges and
4893      build the range test, return it, possibly inverted.  If one of the
4894 ranges is always true or always false, consider it to be the same
4895 expression as the other. */
4896 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4897 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4898 in1_p, low1, high1)
4899 && 0 != (tem = (build_range_check (type,
4900 lhs != 0 ? lhs
4901 : rhs != 0 ? rhs : integer_zero_node,
4902 in_p, low, high))))
4903 {
4904 if (strict_overflow_p)
4905 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4906 return or_op ? invert_truthvalue (tem) : tem;
4907 }
4908
4909   /* On machines where branches are expensive, if this is a
4910 short-circuited branch and the underlying object on both sides
4911 is the same, make a non-short-circuit operation. */
4912 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4913 && lhs != 0 && rhs != 0
4914 && (code == TRUTH_ANDIF_EXPR
4915 || code == TRUTH_ORIF_EXPR)
4916 && operand_equal_p (lhs, rhs, 0))
4917 {
4918 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4919 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4920 which cases we can't do this. */
4921 if (simple_operand_p (lhs))
4922 return build2 (code == TRUTH_ANDIF_EXPR
4923 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924 type, op0, op1);
4925
4926 else if (lang_hooks.decls.global_bindings_p () == 0
4927 && ! CONTAINS_PLACEHOLDER_P (lhs))
4928 {
4929 tree common = save_expr (lhs);
4930
4931 if (0 != (lhs = build_range_check (type, common,
4932 or_op ? ! in0_p : in0_p,
4933 low0, high0))
4934 && (0 != (rhs = build_range_check (type, common,
4935 or_op ? ! in1_p : in1_p,
4936 low1, high1))))
4937 {
4938 if (strict_overflow_p)
4939 fold_overflow_warning (warnmsg,
4940 WARN_STRICT_OVERFLOW_COMPARISON);
4941 return build2 (code == TRUTH_ANDIF_EXPR
4942 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4943 type, lhs, rhs);
4944 }
4945 }
4946 }
4947
4948 return 0;
4949}
4950
4951/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4952 bit value. Arrange things so the extra bits will be set to zero if and
4953    only if C is sign-extended to its full width.  If MASK is nonzero,
4954 it is an INTEGER_CST that should be AND'ed with the extra bits. */
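/* Worked example (assuming an 8-bit mode, a signed field with P == 4, and
   MASK == 0): the constant 0x09 has its 4-bit sign bit set but is not
   sign-extended, so the result is 0xF9; starting instead from the
   sign-extended 0xF9, the result is 0x09, whose extra bits are zero.  */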
4955
4956static tree
4957unextend (tree c, int p, int unsignedp, tree mask)
4958{
4959 tree type = TREE_TYPE (c);
4960 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4961 tree temp;
4962
4963 if (p == modesize || unsignedp)
4964 return c;
4965
4966 /* We work by getting just the sign bit into the low-order bit, then
4967 into the high-order bit, then sign-extend. We then XOR that value
4968 with C. */
4969 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4970 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971
4972 /* We must use a signed type in order to get an arithmetic right shift.
4973 However, we must also avoid introducing accidental overflows, so that
4974 a subsequent call to integer_zerop will work. Hence we must
4975 do the type conversion here. At this point, the constant is either
4976 zero or one, and the conversion to a signed type can never overflow.
4977 We could get an overflow if this conversion is done anywhere else. */
4978 if (TYPE_UNSIGNED (type))
4979 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980
4981 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4982 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4983 if (mask != 0)
4984 temp = const_binop (BIT_AND_EXPR, temp,
4985 fold_convert (TREE_TYPE (c), mask), 0);
4986 /* If necessary, convert the type back to match the type of C. */
4987 if (TYPE_UNSIGNED (type))
4988 temp = fold_convert (type, temp);
4989
4990 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4991}
4992
4993/* Find ways of folding logical expressions of LHS and RHS:
4994 Try to merge two comparisons to the same innermost item.
4995 Look for range tests like "ch >= '0' && ch <= '9'".
4996 Look for combinations of simple terms on machines with expensive branches
4997 and evaluate the RHS unconditionally.
4998
4999 For example, if we have p->a == 2 && p->b == 4 and we can make an
5000 object large enough to span both A and B, we can do this with a comparison
5001 against the object ANDed with the a mask.
5002
5003 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5004 operations to do this with one comparison.
5005
5006    We check for both normal comparisons and the BIT_AND_EXPRs made by
5007    this function and the one above.
5008
5009 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5010 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011
5012 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5013 two operands.
5014
5015 We return the simplified tree or 0 if no optimization is possible. */
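/* Illustrative sketch (exact layout is target-dependent): for a struct
   with two adjacent 4-bit fields a and b packed into one byte,
   "p->a == 2 && p->b == 4" can become a single load of that byte
   compared against a constant holding 2 in a's bits and 4 in b's bits.  */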
5016
5017static tree
5018fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019{
5020 /* If this is the "or" of two comparisons, we can do something if
5021 the comparisons are NE_EXPR. If this is the "and", we can do something
5022 if the comparisons are EQ_EXPR. I.e.,
5023 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5024
5025 WANTED_CODE is this operation code. For single bit fields, we can
5026 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5027 comparison for one-bit fields. */
5028
5029 enum tree_code wanted_code;
5030 enum tree_code lcode, rcode;
5031 tree ll_arg, lr_arg, rl_arg, rr_arg;
5032 tree ll_inner, lr_inner, rl_inner, rr_inner;
5033 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5034 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5035 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5036 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5037 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5038 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5039 enum machine_mode lnmode, rnmode;
5040 tree ll_mask, lr_mask, rl_mask, rr_mask;
5041 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5042 tree l_const, r_const;
5043 tree lntype, rntype, result;
5044 int first_bit, end_bit;
5045 int volatilep;
5046 tree orig_lhs = lhs, orig_rhs = rhs;
5047 enum tree_code orig_code = code;
5048
5049 /* Start by getting the comparison codes. Fail if anything is volatile.
5050 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5051 it were surrounded with a NE_EXPR. */
5052
5053 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5054 return 0;
5055
5056 lcode = TREE_CODE (lhs);
5057 rcode = TREE_CODE (rhs);
5058
5059 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060 {
5061 lhs = build2 (NE_EXPR, truth_type, lhs,
5062 build_int_cst (TREE_TYPE (lhs), 0));
5063 lcode = NE_EXPR;
5064 }
5065
5066 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067 {
5068 rhs = build2 (NE_EXPR, truth_type, rhs,
5069 build_int_cst (TREE_TYPE (rhs), 0));
5070 rcode = NE_EXPR;
5071 }
5072
5073 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5074 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5075 return 0;
5076
5077 ll_arg = TREE_OPERAND (lhs, 0);
5078 lr_arg = TREE_OPERAND (lhs, 1);
5079 rl_arg = TREE_OPERAND (rhs, 0);
5080 rr_arg = TREE_OPERAND (rhs, 1);
5081
5082 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5083 if (simple_operand_p (ll_arg)
5084 && simple_operand_p (lr_arg))
5085 {
5086 tree result;
5087 if (operand_equal_p (ll_arg, rl_arg, 0)
5088 && operand_equal_p (lr_arg, rr_arg, 0))
5089 {
5090 result = combine_comparisons (code, lcode, rcode,
5091 truth_type, ll_arg, lr_arg);
5092 if (result)
5093 return result;
5094 }
5095 else if (operand_equal_p (ll_arg, rr_arg, 0)
5096 && operand_equal_p (lr_arg, rl_arg, 0))
5097 {
5098 result = combine_comparisons (code, lcode,
5099 swap_tree_comparison (rcode),
5100 truth_type, ll_arg, lr_arg);
5101 if (result)
5102 return result;
5103 }
5104 }
5105
5106 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5107 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108
5109 /* If the RHS can be evaluated unconditionally and its operands are
5110 simple, it wins to evaluate the RHS unconditionally on machines
5111 with expensive branches. In this case, this isn't a comparison
5112 that can be merged. Avoid doing this if the RHS is a floating-point
5113 comparison since those can trap. */
5114
5115 if (BRANCH_COST >= 2
5116 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5117 && simple_operand_p (rl_arg)
5118 && simple_operand_p (rr_arg))
5119 {
5120 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5121 if (code == TRUTH_OR_EXPR
5122 && lcode == NE_EXPR && integer_zerop (lr_arg)
5123 && rcode == NE_EXPR && integer_zerop (rr_arg)
5124 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5125 return build2 (NE_EXPR, truth_type,
5126 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5127 ll_arg, rl_arg),
5128 build_int_cst (TREE_TYPE (ll_arg), 0));
5129
5130 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5131 if (code == TRUTH_AND_EXPR
5132 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5133 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5134 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5135 return build2 (EQ_EXPR, truth_type,
5136 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137 ll_arg, rl_arg),
5138 build_int_cst (TREE_TYPE (ll_arg), 0));
5139
5140 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 {
5142 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5143 return build2 (code, truth_type, lhs, rhs);
5144 return NULL_TREE;
5145 }
5146 }
5147
5148 /* See if the comparisons can be merged. Then get all the parameters for
5149 each side. */
5150
5151 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5152 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5153 return 0;
5154
5155 volatilep = 0;
5156 ll_inner = decode_field_reference (ll_arg,
5157 &ll_bitsize, &ll_bitpos, &ll_mode,
5158 &ll_unsignedp, &volatilep, &ll_mask,
5159 &ll_and_mask);
5160 lr_inner = decode_field_reference (lr_arg,
5161 &lr_bitsize, &lr_bitpos, &lr_mode,
5162 &lr_unsignedp, &volatilep, &lr_mask,
5163 &lr_and_mask);
5164 rl_inner = decode_field_reference (rl_arg,
5165 &rl_bitsize, &rl_bitpos, &rl_mode,
5166 &rl_unsignedp, &volatilep, &rl_mask,
5167 &rl_and_mask);
5168 rr_inner = decode_field_reference (rr_arg,
5169 &rr_bitsize, &rr_bitpos, &rr_mode,
5170 &rr_unsignedp, &volatilep, &rr_mask,
5171 &rr_and_mask);
5172
5173   /* The inner operation on the lhs of each comparison must be the same
5174      if we are to be able to do anything.
5175 Then see if we have constants. If not, the same must be true for
5176 the rhs's. */
5177 if (volatilep || ll_inner == 0 || rl_inner == 0
5178 || ! operand_equal_p (ll_inner, rl_inner, 0))
5179 return 0;
5180
5181 if (TREE_CODE (lr_arg) == INTEGER_CST
5182 && TREE_CODE (rr_arg) == INTEGER_CST)
5183 l_const = lr_arg, r_const = rr_arg;
5184 else if (lr_inner == 0 || rr_inner == 0
5185 || ! operand_equal_p (lr_inner, rr_inner, 0))
5186 return 0;
5187 else
5188 l_const = r_const = 0;
5189
5190 /* If either comparison code is not correct for our logical operation,
5191 fail. However, we can convert a one-bit comparison against zero into
5192 the opposite comparison against that bit being set in the field. */
5193
5194 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5195 if (lcode != wanted_code)
5196 {
5197 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 {
5199 /* Make the left operand unsigned, since we are only interested
5200 in the value of one bit. Otherwise we are doing the wrong
5201 thing below. */
5202 ll_unsignedp = 1;
5203 l_const = ll_mask;
5204 }
5205 else
5206 return 0;
5207 }
5208
5209 /* This is analogous to the code for l_const above. */
5210 if (rcode != wanted_code)
5211 {
5212 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213 {
5214 rl_unsignedp = 1;
5215 r_const = rl_mask;
5216 }
5217 else
5218 return 0;
5219 }
5220
5221 /* After this point all optimizations will generate bit-field
5222 references, which we might not want. */
5223 if (! lang_hooks.can_use_bit_fields_p ())
5224 return 0;
5225
5226 /* See if we can find a mode that contains both fields being compared on
5227 the left. If we can't, fail. Otherwise, update all constants and masks
5228 to be relative to a field of that size. */
5229 first_bit = MIN (ll_bitpos, rl_bitpos);
5230 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5231 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5232 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5233 volatilep);
5234 if (lnmode == VOIDmode)
5235 return 0;
5236
5237 lnbitsize = GET_MODE_BITSIZE (lnmode);
5238 lnbitpos = first_bit & ~ (lnbitsize - 1);
5239 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5240 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241
5242 if (BYTES_BIG_ENDIAN)
5243 {
5244 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5245 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5246 }
5247
5248 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5249 size_int (xll_bitpos), 0);
5250 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5251 size_int (xrl_bitpos), 0);
5252
5253 if (l_const)
5254 {
5255 l_const = fold_convert (lntype, l_const);
5256 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5257 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5258 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5259 fold_build1 (BIT_NOT_EXPR,
5260 lntype, ll_mask),
5261 0)))
5262 {
5263 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264
5265 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5266 }
5267 }
5268 if (r_const)
5269 {
5270 r_const = fold_convert (lntype, r_const);
5271 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5272 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5273 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5274 fold_build1 (BIT_NOT_EXPR,
5275 lntype, rl_mask),
5276 0)))
5277 {
5278 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279
5280 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281 }
5282 }
5283
5284   /* If the right sides are not constant, do the same for them.  Also,
5285 disallow this optimization if a size or signedness mismatch occurs
5286 between the left and right sides. */
5287 if (l_const == 0)
5288 {
5289 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5290 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5291 /* Make sure the two fields on the right
5292 correspond to the left without being swapped. */
5293 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5294 return 0;
5295
5296 first_bit = MIN (lr_bitpos, rr_bitpos);
5297 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5298 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5299 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5300 volatilep);
5301 if (rnmode == VOIDmode)
5302 return 0;
5303
5304 rnbitsize = GET_MODE_BITSIZE (rnmode);
5305 rnbitpos = first_bit & ~ (rnbitsize - 1);
5306 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5307 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308
5309 if (BYTES_BIG_ENDIAN)
5310 {
5311 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5312 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5313 }
5314
5315 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5316 size_int (xlr_bitpos), 0);
5317 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5318 size_int (xrr_bitpos), 0);
5319
5320 /* Make a mask that corresponds to both fields being compared.
5321 Do this for both items being compared. If the operands are the
5322 same size and the bits being compared are in the same position
5323 then we can do this by masking both and comparing the masked
5324 results. */
5325 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5326 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5327 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328 {
5329 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5330 ll_unsignedp || rl_unsignedp);
5331 if (! all_ones_mask_p (ll_mask, lnbitsize))
5332 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333
5334 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5335 lr_unsignedp || rr_unsignedp);
5336 if (! all_ones_mask_p (lr_mask, rnbitsize))
5337 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338
5339 return build2 (wanted_code, truth_type, lhs, rhs);
5340 }
5341
5342 /* There is still another way we can do something: If both pairs of
5343 fields being compared are adjacent, we may be able to make a wider
5344 field containing them both.
5345
5346 Note that we still must mask the lhs/rhs expressions. Furthermore,
5347 the mask must be shifted to account for the shift done by
5348 make_bit_field_ref. */
5349 if ((ll_bitsize + ll_bitpos == rl_bitpos
5350 && lr_bitsize + lr_bitpos == rr_bitpos)
5351 || (ll_bitpos == rl_bitpos + rl_bitsize
5352 && lr_bitpos == rr_bitpos + rr_bitsize))
5353 {
5354 tree type;
5355
5356 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5357 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5358 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5359 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360
5361 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5362 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5363 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5364 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365
5366 /* Convert to the smaller type before masking out unwanted bits. */
5367 type = lntype;
5368 if (lntype != rntype)
5369 {
5370 if (lnbitsize > rnbitsize)
5371 {
5372 lhs = fold_convert (rntype, lhs);
5373 ll_mask = fold_convert (rntype, ll_mask);
5374 type = rntype;
5375 }
5376 else if (lnbitsize < rnbitsize)
5377 {
5378 rhs = fold_convert (lntype, rhs);
5379 lr_mask = fold_convert (lntype, lr_mask);
5380 type = lntype;
5381 }
5382 }
5383
5384 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5385 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386
5387 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5388 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389
5390 return build2 (wanted_code, truth_type, lhs, rhs);
5391 }
5392
5393 return 0;
5394 }
5395
5396 /* Handle the case of comparisons with constants. If there is something in
5397 common between the masks, those bits of the constants must be the same.
5398 If not, the condition is always false. Test for this to avoid generating
5399 incorrect code below. */
5400 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5401 if (! integer_zerop (result)
5402 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5403 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404 {
5405 if (wanted_code == NE_EXPR)
5406 {
5407 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5408 return constant_boolean_node (true, truth_type);
5409 }
5410 else
5411 {
5412 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5413 return constant_boolean_node (false, truth_type);
5414 }
5415 }
5416
5417 /* Construct the expression we will return. First get the component
5418 reference we will make. Unless the mask is all ones the width of
5419 that field, perform the mask operation. Then compare with the
5420 merged constant. */
5421 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5422 ll_unsignedp || rl_unsignedp);
5423
5424 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5425 if (! all_ones_mask_p (ll_mask, lnbitsize))
5426 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427
5428 return build2 (wanted_code, truth_type, result,
5429 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5430}
5431
5432/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5433 constant. */
5434
5435static tree
5436optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437{
5438 tree arg0 = op0;
5439 enum tree_code op_code;
5440 tree comp_const = op1;
5441 tree minmax_const;
5442 int consts_equal, consts_lt;
5443 tree inner;
5444
5445 STRIP_SIGN_NOPS (arg0);
5446
5447 op_code = TREE_CODE (arg0);
5448 minmax_const = TREE_OPERAND (arg0, 1);
5449 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5450 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5451 inner = TREE_OPERAND (arg0, 0);
5452
5453 /* If something does not permit us to optimize, return the original tree. */
5454 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5455 || TREE_CODE (comp_const) != INTEGER_CST
5456 || TREE_CONSTANT_OVERFLOW (comp_const)
5457 || TREE_CODE (minmax_const) != INTEGER_CST
5458 || TREE_CONSTANT_OVERFLOW (minmax_const))
5459 return NULL_TREE;
5460
5461 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5462 and GT_EXPR, doing the rest with recursive calls using logical
5463 simplifications. */
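  /* For instance, "MIN (X, 0) <= 5" is handled by inversion as
     !(MIN (X, 0) > 5), and "MAX (X, 0) >= 5" is split into
     "MAX (X, 0) == 5 || MAX (X, 0) > 5".  */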
5464 switch (code)
5465 {
5466 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 {
5468 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5469 type, op0, op1);
5470 if (tem)
5471 return invert_truthvalue (tem);
5472 return NULL_TREE;
5473 }
5474
5475 case GE_EXPR:
5476 return
5477 fold_build2 (TRUTH_ORIF_EXPR, type,
5478 optimize_minmax_comparison
5479 (EQ_EXPR, type, arg0, comp_const),
5480 optimize_minmax_comparison
5481 (GT_EXPR, type, arg0, comp_const));
5482
5483 case EQ_EXPR:
5484 if (op_code == MAX_EXPR && consts_equal)
5485 /* MAX (X, 0) == 0 -> X <= 0 */
5486 return fold_build2 (LE_EXPR, type, inner, comp_const);
5487
5488 else if (op_code == MAX_EXPR && consts_lt)
5489 /* MAX (X, 0) == 5 -> X == 5 */
5490 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491
5492 else if (op_code == MAX_EXPR)
5493 /* MAX (X, 0) == -1 -> false */
5494 return omit_one_operand (type, integer_zero_node, inner);
5495
5496 else if (consts_equal)
5497 /* MIN (X, 0) == 0 -> X >= 0 */
5498 return fold_build2 (GE_EXPR, type, inner, comp_const);
5499
5500 else if (consts_lt)
5501 /* MIN (X, 0) == 5 -> false */
5502 return omit_one_operand (type, integer_zero_node, inner);
5503
5504 else
5505 /* MIN (X, 0) == -1 -> X == -1 */
5506 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507
5508 case GT_EXPR:
5509 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510 /* MAX (X, 0) > 0 -> X > 0
5511 MAX (X, 0) > 5 -> X > 5 */
5512 return fold_build2 (GT_EXPR, type, inner, comp_const);
5513
5514 else if (op_code == MAX_EXPR)
5515 /* MAX (X, 0) > -1 -> true */
5516 return omit_one_operand (type, integer_one_node, inner);
5517
5518 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519 /* MIN (X, 0) > 0 -> false
5520 MIN (X, 0) > 5 -> false */
5521 return omit_one_operand (type, integer_zero_node, inner);
5522
5523 else
5524 /* MIN (X, 0) > -1 -> X > -1 */
5525 return fold_build2 (GT_EXPR, type, inner, comp_const);
5526
5527 default:
5528 return NULL_TREE;
5529 }
5530}
5531
5532/* T is an integer expression that is being multiplied, divided, or taken a
5533 modulus (CODE says which and what kind of divide or modulus) by a
5534 constant C. See if we can eliminate that operation by folding it with
5535 other operations already in T. WIDE_TYPE, if non-null, is a type that
5536 should be used for the computation if wider than our type.
5537
5538 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539 (X * 2) + (Y * 4). We must, however, be assured that either the original
5540 expression would not overflow or that overflow is undefined for the type
5541 in the language in question.
5542
5543 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5544 the machine has a multiply-accumulate insn or that this is part of an
5545 addressing calculation.
5546
5547 If we return a non-null expression, it is an equivalent form of the
5548 original computation, but need not be in the original type.
5549
5550 We set *STRICT_OVERFLOW_P to true if the return values depends on
5551 signed overflow being undefined. Otherwise we do not change
5552 *STRICT_OVERFLOW_P. */
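/* A further example (valid for signed types only when overflow is
   undefined, which is why *STRICT_OVERFLOW_P is set): dividing
   (X * 6) + 12 by 3 yields (X * 2) + 4, since both terms are multiples
   of 3.  */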
5553
5554static tree
5555extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5556 bool *strict_overflow_p)
5557{
5558 /* To avoid exponential search depth, refuse to allow recursion past
5559 three levels. Beyond that (1) it's highly unlikely that we'll find
5560 something interesting and (2) we've probably processed it before
5561 when we built the inner expression. */
5562
5563 static int depth;
5564 tree ret;
5565
5566 if (depth > 3)
5567 return NULL;
5568
5569 depth++;
5570 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5571 depth--;
5572
5573 return ret;
5574}
5575
5576static tree
5577extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5578 bool *strict_overflow_p)
5579{
5580 tree type = TREE_TYPE (t);
5581 enum tree_code tcode = TREE_CODE (t);
5582 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5583 > GET_MODE_SIZE (TYPE_MODE (type)))
5584 ? wide_type : type);
5585 tree t1, t2;
5586 int same_p = tcode == code;
5587 tree op0 = NULL_TREE, op1 = NULL_TREE;
5588 bool sub_strict_overflow_p;
5589
5590 /* Don't deal with constants of zero here; they confuse the code below. */
5591 if (integer_zerop (c))
5592 return NULL_TREE;
5593
5594 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5595 op0 = TREE_OPERAND (t, 0);
5596
5597 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5598 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599
5600 /* Note that we need not handle conditional operations here since fold
5601 already handles those cases. So just do arithmetic here. */
5602 switch (tcode)
5603 {
5604 case INTEGER_CST:
5605 /* For a constant, we can always simplify if we are a multiply
5606 or (for divide and modulus) if it is a multiple of our constant. */
5607 if (code == MULT_EXPR
5608 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5609 return const_binop (code, fold_convert (ctype, t),
5610 fold_convert (ctype, c), 0);
5611 break;
5612
5613 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5614 /* If op0 is an expression ... */
5615 if ((COMPARISON_CLASS_P (op0)
5616 || UNARY_CLASS_P (op0)
5617 || BINARY_CLASS_P (op0)
5618 || EXPRESSION_CLASS_P (op0))
5619 /* ... and is unsigned, and its type is smaller than ctype,
5620	     then we cannot pass it through as a widening.  */
5621 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5622 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5623 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5624 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5625 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5626 /* ... or this is a truncation (t is narrower than op0),
5627 then we cannot pass through this narrowing. */
5628 || (GET_MODE_SIZE (TYPE_MODE (type))
5629 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5630 /* ... or signedness changes for division or modulus,
5631 then we cannot pass through this conversion. */
5632 || (code != MULT_EXPR
5633 && (TYPE_UNSIGNED (ctype)
5634 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5635 break;
5636
5637 /* Pass the constant down and see if we can make a simplification. If
5638 we can, replace this expression with the inner simplification for
5639 possible later conversion to our or some other type. */
5640 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5641 && TREE_CODE (t2) == INTEGER_CST
5642 && ! TREE_CONSTANT_OVERFLOW (t2)
5643 && (0 != (t1 = extract_muldiv (op0, t2, code,
5644 code == MULT_EXPR
5645 ? ctype : NULL_TREE,
5646 strict_overflow_p))))
5647 return t1;
5648 break;
5649
5650 case ABS_EXPR:
5651 /* If widening the type changes it from signed to unsigned, then we
5652 must avoid building ABS_EXPR itself as unsigned. */
5653 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654 {
5655 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5656 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5657 != 0)
5658 {
5659 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5660 return fold_convert (ctype, t1);
5661 }
5662 break;
5663 }
5664 /* If the constant is negative, we cannot simplify this. */
5665 if (tree_int_cst_sgn (c) == -1)
5666 break;
5667 /* FALLTHROUGH */
5668 case NEGATE_EXPR:
5669 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5670 != 0)
5671 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5672 break;
5673
5674 case MIN_EXPR: case MAX_EXPR:
5675 /* If widening the type changes the signedness, then we can't perform
5676 this optimization as that changes the result. */
5677 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5678 break;
5679
5680 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5681 sub_strict_overflow_p = false;
5682 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5683 &sub_strict_overflow_p)) != 0
5684 && (t2 = extract_muldiv (op1, c, code, wide_type,
5685 &sub_strict_overflow_p)) != 0)
5686 {
5687 if (tree_int_cst_sgn (c) < 0)
5688 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5689 if (sub_strict_overflow_p)
5690 *strict_overflow_p = true;
5691 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5692 fold_convert (ctype, t2));
5693 }
5694 break;
5695
5696 case LSHIFT_EXPR: case RSHIFT_EXPR:
5697 /* If the second operand is constant, this is a multiplication
5698 or floor division, by a power of two, so we can treat it that
5699 way unless the multiplier or divisor overflows. Signed
5700 left-shift overflow is implementation-defined rather than
5701 undefined in C90, so do not convert signed left shift into
5702 multiplication. */
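      /* E.g. an unsigned "X << 3" is handled below as X * 8, and
	 "X >> 2" as the floor division X / 4.  */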
5703 if (TREE_CODE (op1) == INTEGER_CST
5704 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5705 /* const_binop may not detect overflow correctly,
5706 so check for it explicitly here. */
5707 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5708 && TREE_INT_CST_HIGH (op1) == 0
5709 && 0 != (t1 = fold_convert (ctype,
5710 const_binop (LSHIFT_EXPR,
5711 size_one_node,
5712 op1, 0)))
5713 && ! TREE_OVERFLOW (t1))
5714 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5715 ? MULT_EXPR : FLOOR_DIV_EXPR,
5716 ctype, fold_convert (ctype, op0), t1),
5717 c, code, wide_type, strict_overflow_p);
5718 break;
5719
5720 case PLUS_EXPR: case MINUS_EXPR:
5721 /* See if we can eliminate the operation on both sides. If we can, we
5722 can return a new PLUS or MINUS. If we can't, the only remaining
5723 cases where we can do anything are if the second operand is a
5724 constant. */
5725 sub_strict_overflow_p = false;
5726 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5727 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5728 if (t1 != 0 && t2 != 0
5729 && (code == MULT_EXPR
5730 /* If not multiplication, we can only do this if both operands
5731 are divisible by c. */
5732 || (multiple_of_p (ctype, op0, c)
5733 && multiple_of_p (ctype, op1, c))))
5734 {
5735 if (sub_strict_overflow_p)
5736 *strict_overflow_p = true;
5737 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5738 fold_convert (ctype, t2));
5739 }
5740
5741 /* If this was a subtraction, negate OP1 and set it to be an addition.
5742 This simplifies the logic below. */
5743 if (tcode == MINUS_EXPR)
5744 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5745
5746 if (TREE_CODE (op1) != INTEGER_CST)
5747 break;
5748
5749      /* If either OP1 or C is negative, this optimization is not safe for
5750 some of the division and remainder types while for others we need
5751 to change the code. */
5752 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5753 {
5754 if (code == CEIL_DIV_EXPR)
5755 code = FLOOR_DIV_EXPR;
5756 else if (code == FLOOR_DIV_EXPR)
5757 code = CEIL_DIV_EXPR;
5758 else if (code != MULT_EXPR
5759 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5760 break;
5761 }
5762
5763 /* If it's a multiply or a division/modulus operation of a multiple
5764 of our constant, do the operation and verify it doesn't overflow. */
5765 if (code == MULT_EXPR
5766 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5767 {
5768 op1 = const_binop (code, fold_convert (ctype, op1),
5769 fold_convert (ctype, c), 0);
5770 /* We allow the constant to overflow with wrapping semantics. */
5771 if (op1 == 0
5772 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5773 break;
5774 }
5775 else
5776 break;
5777
5778      /* If we have an unsigned type that is not a sizetype, we cannot widen
5779 the operation since it will change the result if the original
5780 computation overflowed. */
5781 if (TYPE_UNSIGNED (ctype)
5782 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5783 && ctype != type)
5784 break;
5785
5786 /* If we were able to eliminate our operation from the first side,
5787 apply our operation to the second side and reform the PLUS. */
5788 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5789 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5790
5791 /* The last case is if we are a multiply. In that case, we can
5792 apply the distributive law to commute the multiply and addition
5793 if the multiplication of the constants doesn't overflow. */
5794 if (code == MULT_EXPR)
5795 return fold_build2 (tcode, ctype,
5796 fold_build2 (code, ctype,
5797 fold_convert (ctype, op0),
5798 fold_convert (ctype, c)),
5799 op1);
5800
5801 break;
5802
5803 case MULT_EXPR:
5804 /* We have a special case here if we are doing something like
5805 (C * 8) % 4 since we know that's zero. */
5806 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5807 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5808 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5809 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5810 return omit_one_operand (type, integer_zero_node, op0);
5811
5812 /* ... fall through ... */
5813
5814 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5815 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5816 /* If we can extract our operation from the LHS, do so and return a
5817 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5818 do something only if the second operand is a constant. */
5819 if (same_p
5820 && (t1 = extract_muldiv (op0, c, code, wide_type,
5821 strict_overflow_p)) != 0)
5822 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5823 fold_convert (ctype, op1));
5824 else if (tcode == MULT_EXPR && code == MULT_EXPR
5825 && (t1 = extract_muldiv (op1, c, code, wide_type,
5826 strict_overflow_p)) != 0)
5827 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5828 fold_convert (ctype, t1));
5829 else if (TREE_CODE (op1) != INTEGER_CST)
5830 return 0;
5831
5832 /* If these are the same operation types, we can associate them
5833 assuming no overflow. */
5834 if (tcode == code
5835 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5836 fold_convert (ctype, c), 0))
5837 && ! TREE_OVERFLOW (t1))
5838 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5839
5840 /* If these operations "cancel" each other, we have the main
5841 optimizations of this pass, which occur when either constant is a
5842 multiple of the other, in which case we replace this with either an
5843	 operation of CODE or TCODE.
5844
5845 If we have an unsigned type that is not a sizetype, we cannot do
5846 this since it will change the result if the original computation
5847 overflowed. */
5848 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5849 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5850 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5851 || (tcode == MULT_EXPR
5852 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5853 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5854 {
5855 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5856 {
5857 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5858 *strict_overflow_p = true;
5859 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5860 fold_convert (ctype,
5861 const_binop (TRUNC_DIV_EXPR,
5862 op1, c, 0)));
5863 }
5864 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5865 {
5866 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5867 *strict_overflow_p = true;
5868 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype,
5870 const_binop (TRUNC_DIV_EXPR,
5871 c, op1, 0)));
5872 }
5873 }
5874 break;
5875
5876 default:
5877 break;
5878 }
5879
5880 return 0;
5881}
5882
5883/* Return a node which has the indicated constant VALUE (either 0 or
5884 1), and is of the indicated TYPE. */
5885
5886tree
5887constant_boolean_node (int value, tree type)
5888{
5889 if (type == integer_type_node)
5890 return value ? integer_one_node : integer_zero_node;
5891 else if (type == boolean_type_node)
5892 return value ? boolean_true_node : boolean_false_node;
5893 else
5894 return build_int_cst (type, value);
5895}
5896
5897
5898/* Return true if expr looks like an ARRAY_REF and set base and
5899 offset to the appropriate trees. If there is no offset,
5900 offset is set to NULL_TREE. Base will be canonicalized to
5901 something you can get the element type from using
5902    TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5903    in bytes relative to the base.  */
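/* For example, for "&a[i]" the base becomes "a" and the offset becomes
   "i * element_size", while for a plain pointer variable "p" the base is
   "p" with a NULL_TREE offset.  */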
5904
5905static bool
5906extract_array_ref (tree expr, tree *base, tree *offset)
5907{
5908 /* One canonical form is a PLUS_EXPR with the first
5909 argument being an ADDR_EXPR with a possible NOP_EXPR
5910 attached. */
5911 if (TREE_CODE (expr) == PLUS_EXPR)
5912 {
5913 tree op0 = TREE_OPERAND (expr, 0);
5914 tree inner_base, dummy1;
5915      /* Strip NOP_EXPRs here because the C front ends and/or
5916	 folders may present us with (int *)&x.a + 4B.  */
5917 STRIP_NOPS (op0);
5918 if (extract_array_ref (op0, &inner_base, &dummy1))
5919 {
5920 *base = inner_base;
5921 if (dummy1 == NULL_TREE)
5922 *offset = TREE_OPERAND (expr, 1);
5923 else
5924 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5925 dummy1, TREE_OPERAND (expr, 1));
5926 return true;
5927 }
5928 }
5929 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5930 which we transform into an ADDR_EXPR with appropriate
5931 offset. For other arguments to the ADDR_EXPR we assume
5932 zero offset and as such do not care about the ADDR_EXPR
5933 type and strip possible nops from it. */
5934 else if (TREE_CODE (expr) == ADDR_EXPR)
5935 {
5936 tree op0 = TREE_OPERAND (expr, 0);
5937 if (TREE_CODE (op0) == ARRAY_REF)
5938 {
5939 tree idx = TREE_OPERAND (op0, 1);
5940 *base = TREE_OPERAND (op0, 0);
5941 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5942 array_ref_element_size (op0));
5943 }
5944 else
5945 {
5946 /* Handle array-to-pointer decay as &a. */
5947 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5948 *base = TREE_OPERAND (expr, 0);
5949 else
5950 *base = expr;
5951 *offset = NULL_TREE;
5952 }
5953 return true;
5954 }
5955 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5956 else if (SSA_VAR_P (expr)
5957 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5958 {
5959 *base = expr;
5960 *offset = NULL_TREE;
5961 return true;
5962 }
5963
5964 return false;
5965}
5966
5967
5968/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5969    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5970 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5971 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5972 COND is the first argument to CODE; otherwise (as in the example
5973 given here), it is the second argument. TYPE is the type of the
5974 original expression. Return NULL_TREE if no simplification is
5975 possible. */
5976
5977static tree
5978fold_binary_op_with_conditional_arg (enum tree_code code,
5979 tree type, tree op0, tree op1,
5980 tree cond, tree arg, int cond_first_p)
5981{
5982 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984 tree test, true_value, false_value;
5985 tree lhs = NULL_TREE;
5986 tree rhs = NULL_TREE;
5987
5988 /* This transformation is only worthwhile if we don't have to wrap
5989 arg in a SAVE_EXPR, and the operation can be simplified on at least
5990      one of the branches once it's pushed inside the COND_EXPR.  */
5991 if (!TREE_CONSTANT (arg))
5992 return NULL_TREE;
5993
5994 if (TREE_CODE (cond) == COND_EXPR)
5995 {
5996 test = TREE_OPERAND (cond, 0);
5997 true_value = TREE_OPERAND (cond, 1);
5998 false_value = TREE_OPERAND (cond, 2);
5999      /* If this operand throws an exception, then it does not make
6000 sense to try to perform a logical or arithmetic operation
6001 involving it. */
6002 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6003 lhs = true_value;
6004 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6005 rhs = false_value;
6006 }
6007 else
6008 {
6009 tree testtype = TREE_TYPE (cond);
6010 test = cond;
6011 true_value = constant_boolean_node (true, testtype);
6012 false_value = constant_boolean_node (false, testtype);
6013 }
6014
6015 arg = fold_convert (arg_type, arg);
6016 if (lhs == 0)
6017 {
6018 true_value = fold_convert (cond_type, true_value);
6019 if (cond_first_p)
6020 lhs = fold_build2 (code, type, true_value, arg);
6021 else
6022 lhs = fold_build2 (code, type, arg, true_value);
6023 }
6024 if (rhs == 0)
6025 {
6026 false_value = fold_convert (cond_type, false_value);
6027 if (cond_first_p)
6028 rhs = fold_build2 (code, type, false_value, arg);
6029 else
6030 rhs = fold_build2 (code, type, arg, false_value);
6031 }
6032
6033 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6034 return fold_convert (type, test);
6035}
6036
6037
6038/* Subroutine of fold() that checks for the addition of +/- 0.0.
6039
6040 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6041 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6042 ADDEND is the same as X.
6043
6044 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6045 and finite. The problematic cases are when X is zero, and its mode
6046 has signed zeros. In the case of rounding towards -infinity,
6047 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6048 modes, X + 0 is not the same as X because -0 + 0 is 0. */
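/* Concretely: "X - 0.0" may be folded to X even when signed zeros are
   honored, provided sign-dependent rounding is not, whereas "X + 0.0"
   may not: for X == -0.0 the sum is +0.0, but the folded form would be
   -0.0.  */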
6049
6050static bool
6051fold_real_zero_addition_p (tree type, tree addend, int negate)
6052{
6053 if (!real_zerop (addend))
6054 return false;
6055
6056 /* Don't allow the fold with -fsignaling-nans. */
6057 if (HONOR_SNANS (TYPE_MODE (type)))
6058 return false;
6059
6060 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6061 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6062 return true;
6063
6064 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6065 if (TREE_CODE (addend) == REAL_CST
6066 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6067 negate = !negate;
6068
6069 /* The mode has signed zeros, and we have to honor their sign.
6070 In this situation, there is only one case we can return true for.
6071 X - 0 is the same as X unless rounding towards -infinity is
6072 supported. */
6073 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6074}
6075
6076/* Subroutine of fold() that checks comparisons of built-in math
6077 functions against real constants.
6078
6079 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6080 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6081 is the type of the result and ARG0 and ARG1 are the operands of the
6082 comparison. ARG1 must be a TREE_REAL_CST.
6083
6084 The function returns the constant folded tree if a simplification
6085 can be made, and NULL_TREE otherwise. */
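/* For instance, "sqrt (x) > 2.0" becomes "x > 4.0" below (the bound
   squared and rounded to the argument's mode), and comparisons against a
   negative bound collapse to a constant or to "x >= 0.0".  */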
6086
6087static tree
6088fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6089 tree type, tree arg0, tree arg1)
6090{
6091 REAL_VALUE_TYPE c;
6092
6093 if (BUILTIN_SQRT_P (fcode))
6094 {
6095 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6096 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6097
6098 c = TREE_REAL_CST (arg1);
6099 if (REAL_VALUE_NEGATIVE (c))
6100 {
6101 /* sqrt(x) < y is always false, if y is negative. */
6102 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6103 return omit_one_operand (type, integer_zero_node, arg);
6104
6105 /* sqrt(x) > y is always true, if y is negative and we
6106 don't care about NaNs, i.e. negative values of x. */
6107 if (code == NE_EXPR || !HONOR_NANS (mode))
6108 return omit_one_operand (type, integer_one_node, arg);
6109
6110 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6111 return fold_build2 (GE_EXPR, type, arg,
6112 build_real (TREE_TYPE (arg), dconst0));
6113 }
6114 else if (code == GT_EXPR || code == GE_EXPR)
6115 {
6116 REAL_VALUE_TYPE c2;
6117
6118 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6119 real_convert (&c2, mode, &c2);
6120
6121 if (REAL_VALUE_ISINF (c2))
6122 {
6123 /* sqrt(x) > y is x == +Inf, when y is very large. */
6124 if (HONOR_INFINITIES (mode))
6125 return fold_build2 (EQ_EXPR, type, arg,
6126 build_real (TREE_TYPE (arg), c2));
6127
6128 /* sqrt(x) > y is always false, when y is very large
6129 and we don't care about infinities. */
6130 return omit_one_operand (type, integer_zero_node, arg);
6131 }
6132
6133 /* sqrt(x) > c is the same as x > c*c. */
6134 return fold_build2 (code, type, arg,
6135 build_real (TREE_TYPE (arg), c2));
6136 }
6137 else if (code == LT_EXPR || code == LE_EXPR)
6138 {
6139 REAL_VALUE_TYPE c2;
6140
6141 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6142 real_convert (&c2, mode, &c2);
6143
6144 if (REAL_VALUE_ISINF (c2))
6145 {
6146 /* sqrt(x) < y is always true, when y is a very large
6147 value and we don't care about NaNs or Infinities. */
6148 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6149 return omit_one_operand (type, integer_one_node, arg);
6150
6151 /* sqrt(x) < y is x != +Inf when y is very large and we
6152 don't care about NaNs. */
6153 if (! HONOR_NANS (mode))
6154 return fold_build2 (NE_EXPR, type, arg,
6155 build_real (TREE_TYPE (arg), c2));
6156
6157 /* sqrt(x) < y is x >= 0 when y is very large and we
6158 don't care about Infinities. */
6159 if (! HONOR_INFINITIES (mode))
6160 return fold_build2 (GE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg), dconst0));
6162
6163 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6164 if (lang_hooks.decls.global_bindings_p () != 0
6165 || CONTAINS_PLACEHOLDER_P (arg))
6166 return NULL_TREE;
6167
6168 arg = save_expr (arg);
6169 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6170 fold_build2 (GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 dconst0)),
6173 fold_build2 (NE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg),
6175 c2)));
6176 }
6177
6178 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6179 if (! HONOR_NANS (mode))
6180 return fold_build2 (code, type, arg,
6181 build_real (TREE_TYPE (arg), c2));
6182
6183 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6184 if (lang_hooks.decls.global_bindings_p () == 0
6185 && ! CONTAINS_PLACEHOLDER_P (arg))
6186 {
6187 arg = save_expr (arg);
6188 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6189 fold_build2 (GE_EXPR, type, arg,
6190 build_real (TREE_TYPE (arg),
6191 dconst0)),
6192 fold_build2 (code, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 c2)));
6195 }
6196 }
6197 }
6198
6199 return NULL_TREE;
6200}
6201
6202/* Subroutine of fold() that optimizes comparisons against Infinities,
6203 either +Inf or -Inf.
6204
6205 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6206 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6207 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6208
6209 The function returns the constant folded tree if a simplification
6210 can be made, and NULL_TREE otherwise. */
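/* For instance, "x >= +Inf" becomes "x > DBL_MAX" below and "x < +Inf"
   becomes "x <= DBL_MAX", with the comparison senses swapped for -Inf.  */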
6211
6212static tree
6213fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6214{
6215 enum machine_mode mode;
6216 REAL_VALUE_TYPE max;
6217 tree temp;
6218 bool neg;
6219
6220 mode = TYPE_MODE (TREE_TYPE (arg0));
6221
6222 /* For negative infinity swap the sense of the comparison. */
6223 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6224 if (neg)
6225 code = swap_tree_comparison (code);
6226
6227 switch (code)
6228 {
6229 case GT_EXPR:
6230      /* x > +Inf is always false, if we ignore sNaNs.  */
6231 if (HONOR_SNANS (mode))
6232 return NULL_TREE;
6233 return omit_one_operand (type, integer_zero_node, arg0);
6234
6235 case LE_EXPR:
6236      /* x <= +Inf is always true, if we don't care about NaNs.  */
6237 if (! HONOR_NANS (mode))
6238 return omit_one_operand (type, integer_one_node, arg0);
6239
6240 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6241 if (lang_hooks.decls.global_bindings_p () == 0
6242 && ! CONTAINS_PLACEHOLDER_P (arg0))
6243 {
6244 arg0 = save_expr (arg0);
6245 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6246 }
6247 break;
6248
6249 case EQ_EXPR:
6250 case GE_EXPR:
6251 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255
6256 case LT_EXPR:
6257 /* x < +Inf is always equal to x <= DBL_MAX. */
6258 real_maxval (&max, neg, mode);
6259 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6261
6262 case NE_EXPR:
6263 /* x != +Inf is always equal to !(x > DBL_MAX). */
6264 real_maxval (&max, neg, mode);
6265 if (! HONOR_NANS (mode))
6266 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6268
6269 /* The transformation below creates non-gimple code and thus is
6270 not appropriate if we are in gimple form. */
6271 if (in_gimple_form)
6272 return NULL_TREE;
6273
6274 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6275 arg0, build_real (TREE_TYPE (arg0), max));
6276 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6277
6278 default:
6279 break;
6280 }
6281
6282 return NULL_TREE;
6283}
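/* Worked example (editorial, illustrative only): assuming an IEEE
   double operand, fold_inf_compare rewrites x >= +Inf as x > DBL_MAX
   and x < +Inf as x <= DBL_MAX; for a -Inf operand the comparison is
   first reversed by swap_tree_comparison, so x <= -Inf becomes
   x < -DBL_MAX.  */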
6284
6285/* Subroutine of fold() that optimizes comparisons of a division by
6286 a nonzero integer constant against an integer constant, i.e.
6287 X/C1 op C2.
6288
6289 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6290 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6291   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6292
6293 The function returns the constant folded tree if a simplification
6294 can be made, and NULL_TREE otherwise. */
6295
6296static tree
6297fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6298{
6299 tree prod, tmp, hi, lo;
6300 tree arg00 = TREE_OPERAND (arg0, 0);
6301 tree arg01 = TREE_OPERAND (arg0, 1);
6302 unsigned HOST_WIDE_INT lpart;
6303 HOST_WIDE_INT hpart;
6304 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6305 bool neg_overflow;
6306 int overflow;
6307
6308 /* We have to do this the hard way to detect unsigned overflow.
6309 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6310 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6311 TREE_INT_CST_HIGH (arg01),
6312 TREE_INT_CST_LOW (arg1),
6313 TREE_INT_CST_HIGH (arg1),
6314 &lpart, &hpart, unsigned_p);
6315 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6316 prod = force_fit_type (prod, -1, overflow, false);
6317 neg_overflow = false;
6318
6319 if (unsigned_p)
6320 {
6321 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6322 lo = prod;
6323
6324 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6325 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6326 TREE_INT_CST_HIGH (prod),
6327 TREE_INT_CST_LOW (tmp),
6328 TREE_INT_CST_HIGH (tmp),
6329 &lpart, &hpart, unsigned_p);
6330 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6331 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6332 TREE_CONSTANT_OVERFLOW (prod));
6333 }
6334 else if (tree_int_cst_sgn (arg01) >= 0)
6335 {
6336 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6337 switch (tree_int_cst_sgn (arg1))
6338 {
6339 case -1:
6340 neg_overflow = true;
6341 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6342 hi = prod;
6343 break;
6344
6345 case 0:
6346 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6347 hi = tmp;
6348 break;
6349
6350 case 1:
6351 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6352 lo = prod;
6353 break;
6354
6355 default:
6356 gcc_unreachable ();
6357 }
6358 }
6359 else
6360 {
6361 /* A negative divisor reverses the relational operators. */
6362 code = swap_tree_comparison (code);
6363
6364 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6365 switch (tree_int_cst_sgn (arg1))
6366 {
6367 case -1:
6368 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6369 lo = prod;
6370 break;
6371
6372 case 0:
6373 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6374 lo = tmp;
6375 break;
6376
6377 case 1:
6378 neg_overflow = true;
6379 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6380 hi = prod;
6381 break;
6382
6383 default:
6384 gcc_unreachable ();
6385 }
6386 }
6387
6388 switch (code)
6389 {
6390 case EQ_EXPR:
6391 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6392 return omit_one_operand (type, integer_zero_node, arg00);
6393 if (TREE_OVERFLOW (hi))
6394 return fold_build2 (GE_EXPR, type, arg00, lo);
6395 if (TREE_OVERFLOW (lo))
6396 return fold_build2 (LE_EXPR, type, arg00, hi);
6397 return build_range_check (type, arg00, 1, lo, hi);
6398
6399 case NE_EXPR:
6400 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401 return omit_one_operand (type, integer_one_node, arg00);
6402 if (TREE_OVERFLOW (hi))
6403 return fold_build2 (LT_EXPR, type, arg00, lo);
6404 if (TREE_OVERFLOW (lo))
6405 return fold_build2 (GT_EXPR, type, arg00, hi);
6406 return build_range_check (type, arg00, 0, lo, hi);
6407
6408 case LT_EXPR:
6409 if (TREE_OVERFLOW (lo))
6410 {
6411 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6412 return omit_one_operand (type, tmp, arg00);
6413 }
6414 return fold_build2 (LT_EXPR, type, arg00, lo);
6415
6416 case LE_EXPR:
6417 if (TREE_OVERFLOW (hi))
6418 {
6419 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6420 return omit_one_operand (type, tmp, arg00);
6421 }
6422 return fold_build2 (LE_EXPR, type, arg00, hi);
6423
6424 case GT_EXPR:
6425 if (TREE_OVERFLOW (hi))
6426 {
6427 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6428 return omit_one_operand (type, tmp, arg00);
6429 }
6430 return fold_build2 (GT_EXPR, type, arg00, hi);
6431
6432 case GE_EXPR:
6433 if (TREE_OVERFLOW (lo))
6434 {
6435 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6436 return omit_one_operand (type, tmp, arg00);
6437 }
6438 return fold_build2 (GE_EXPR, type, arg00, lo);
6439
6440 default:
6441 break;
6442 }
6443
6444 return NULL_TREE;
6445}
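/* Worked example (editorial, illustrative only): for signed X,
   fold_div_compare rewrites X / 3 == 2 as the range check
   6 <= X && X <= 8, since exactly the values 6, 7 and 8 truncate
   to 2; here prod = 6, tmp = 2, lo = prod and hi = prod + tmp.  */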
6446
6447
6448/* If CODE with arguments ARG0 and ARG1 represents a single bit
6449 equality/inequality test, then return a simplified form of the test
6450   using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
6451   desired result type.  */
6452
6453static tree
6454fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6455 tree result_type)
6456{
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461 {
6462 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6464 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465
6466 if (arg00 != NULL_TREE
6467 /* This is only a win if casting to a signed type is cheap,
6468 i.e. when arg00's type is not a partial mode. */
6469 && TYPE_PRECISION (TREE_TYPE (arg00))
6470 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6471 {
6472 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6473 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6474 result_type, fold_convert (stype, arg00),
6475 build_int_cst (stype, 0));
6476 }
6477 }
6478
6479 return NULL_TREE;
6480}
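/* Worked example (editorial, illustrative only): if A has an 8-bit
   type whose sign bit is 0x80, then (A & 0x80) != 0 folds to
   (signed char) A < 0 and (A & 0x80) == 0 folds to
   (signed char) A >= 0, assuming the mode is not partial.  */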
6481
6482/* If CODE with arguments ARG0 and ARG1 represents a single bit
6483 equality/inequality test, then return a simplified form of
6484 the test using shifts and logical operations. Otherwise return
6485   NULL.  RESULT_TYPE is the desired result type.  */
6486
6487tree
6488fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6489 tree result_type)
6490{
6491 /* If this is testing a single bit, we can optimize the test. */
6492 if ((code == NE_EXPR || code == EQ_EXPR)
6493 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6494 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6495 {
6496 tree inner = TREE_OPERAND (arg0, 0);
6497 tree type = TREE_TYPE (arg0);
6498 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6499 enum machine_mode operand_mode = TYPE_MODE (type);
6500 int ops_unsigned;
6501 tree signed_type, unsigned_type, intermediate_type;
6502 tree tem;
6503
6504 /* First, see if we can fold the single bit test into a sign-bit
6505 test. */
6506 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6507 result_type);
6508 if (tem)
6509 return tem;
6510
6511 /* Otherwise we have (A & C) != 0 where C is a single bit,
6512	 convert it into ((A >> C2) & 1), where C2 = log2(C).
6513 Similarly for (A & C) == 0. */
6514
6515 /* If INNER is a right shift of a constant and it plus BITNUM does
6516 not overflow, adjust BITNUM and INNER. */
6517 if (TREE_CODE (inner) == RSHIFT_EXPR
6518 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6519 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6520 && bitnum < TYPE_PRECISION (type)
6521	  && compare_tree_int (TREE_OPERAND (inner, 1),
6522				TYPE_PRECISION (type) - bitnum) < 0)
6523 {
6524 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6525 inner = TREE_OPERAND (inner, 0);
6526 }
6527
6528 /* If we are going to be able to omit the AND below, we must do our
6529 operations as unsigned. If we must use the AND, we have a choice.
6530 Normally unsigned is faster, but for some machines signed is. */
6531#ifdef LOAD_EXTEND_OP
6532 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6533 && !flag_syntax_only) ? 0 : 1;
6534#else
6535 ops_unsigned = 1;
6536#endif
6537
6538 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6539 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6540 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6541 inner = fold_convert (intermediate_type, inner);
6542
6543 if (bitnum != 0)
6544 inner = build2 (RSHIFT_EXPR, intermediate_type,
6545 inner, size_int (bitnum));
6546
6547 if (code == EQ_EXPR)
6548 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6549 inner, integer_one_node);
6550
6551 /* Put the AND last so it can combine with more things. */
6552 inner = build2 (BIT_AND_EXPR, intermediate_type,
6553 inner, integer_one_node);
6554
6555 /* Make sure to return the proper type. */
6556 inner = fold_convert (result_type, inner);
6557
6558 return inner;
6559 }
6560 return NULL_TREE;
6561}
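/* Worked example (editorial, illustrative only): when the sign-bit
   form above does not apply, (A & 8) != 0 becomes ((A >> 3) & 1)
   and (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), with A first
   converted to the chosen signed or unsigned intermediate type.  */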
6562
6563/* Check whether we are allowed to reorder operands arg0 and arg1,
6564 such that the evaluation of arg1 occurs before arg0. */
6565
6566static bool
6567reorder_operands_p (tree arg0, tree arg1)
6568{
6569 if (! flag_evaluation_order)
6570 return true;
6571 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6572 return true;
6573 return ! TREE_SIDE_EFFECTS (arg0)
6574 && ! TREE_SIDE_EFFECTS (arg1);
6575}
6576
6577/* Test whether it is preferable to swap two operands, ARG0 and
6578 ARG1, for example because ARG0 is an integer constant and ARG1
6579 isn't. If REORDER is true, only recommend swapping if we can
6580 evaluate the operands in reverse order. */
6581
6582bool
6583tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6584{
6585 STRIP_SIGN_NOPS (arg0);
6586 STRIP_SIGN_NOPS (arg1);
6587
6588 if (TREE_CODE (arg1) == INTEGER_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == INTEGER_CST)
6591 return 1;
6592
6593 if (TREE_CODE (arg1) == REAL_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == REAL_CST)
6596 return 1;
6597
6598 if (TREE_CODE (arg1) == COMPLEX_CST)
6599 return 0;
6600 if (TREE_CODE (arg0) == COMPLEX_CST)
6601 return 1;
6602
6603 if (TREE_CONSTANT (arg1))
6604 return 0;
6605 if (TREE_CONSTANT (arg0))
6606 return 1;
6607
6608 if (optimize_size)
6609 return 0;
6610
6611 if (reorder && flag_evaluation_order
6612 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6613 return 0;
6614
6615 if (DECL_P (arg1))
6616 return 0;
6617 if (DECL_P (arg0))
6618 return 1;
6619
6620  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6621 for commutative and comparison operators. Ensuring a canonical
6622 form allows the optimizers to find additional redundancies without
6623 having to explicitly check for both orderings. */
6624 if (TREE_CODE (arg0) == SSA_NAME
6625 && TREE_CODE (arg1) == SSA_NAME
6626 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6627 return 1;
6628
6629 return 0;
6630}
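/* Usage note (editorial): fold_binary and fold_comparison use this
   predicate to canonicalize operand order, e.g. rebuilding 1 + x as
   x + 1 and 5 < x as x > 5, so that constants end up second.  */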
6631
6632/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6633 ARG0 is extended to a wider type. */
6634
6635static tree
6636fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6637{
6638 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6639 tree arg1_unw;
6640 tree shorter_type, outer_type;
6641 tree min, max;
6642 bool above, below;
6643
6644 if (arg0_unw == arg0)
6645 return NULL_TREE;
6646 shorter_type = TREE_TYPE (arg0_unw);
6647
6648#ifdef HAVE_canonicalize_funcptr_for_compare
6649 /* Disable this optimization if we're casting a function pointer
6650 type on targets that require function pointer canonicalization. */
6651 if (HAVE_canonicalize_funcptr_for_compare
6652 && TREE_CODE (shorter_type) == POINTER_TYPE
6653 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6654 return NULL_TREE;
6655#endif
6656
6657 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6658 return NULL_TREE;
6659
6660 arg1_unw = get_unwidened (arg1, shorter_type);
6661
6662 /* If possible, express the comparison in the shorter mode. */
6663 if ((code == EQ_EXPR || code == NE_EXPR
6664 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6665 && (TREE_TYPE (arg1_unw) == shorter_type
6666 || (TREE_CODE (arg1_unw) == INTEGER_CST
6667 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6668 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6669 && int_fits_type_p (arg1_unw, shorter_type))))
6670 return fold_build2 (code, type, arg0_unw,
6671 fold_convert (shorter_type, arg1_unw));
6672
6673 if (TREE_CODE (arg1_unw) != INTEGER_CST
6674 || TREE_CODE (shorter_type) != INTEGER_TYPE
6675 || !int_fits_type_p (arg1_unw, shorter_type))
6676 return NULL_TREE;
6677
6678  /* If we are comparing with an integer that does not fit into the range
6679 of the shorter type, the result is known. */
6680 outer_type = TREE_TYPE (arg1_unw);
6681 min = lower_bound_in_type (outer_type, shorter_type);
6682 max = upper_bound_in_type (outer_type, shorter_type);
6683
6684 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 max, arg1_unw));
6686 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6687 arg1_unw, min));
6688
6689 switch (code)
6690 {
6691 case EQ_EXPR:
6692 if (above || below)
6693 return omit_one_operand (type, integer_zero_node, arg0);
6694 break;
6695
6696 case NE_EXPR:
6697 if (above || below)
6698 return omit_one_operand (type, integer_one_node, arg0);
6699 break;
6700
6701 case LT_EXPR:
6702 case LE_EXPR:
6703 if (above)
6704 return omit_one_operand (type, integer_one_node, arg0);
6705 else if (below)
6706 return omit_one_operand (type, integer_zero_node, arg0);
6707
6708 case GT_EXPR:
6709 case GE_EXPR:
6710 if (above)
6711 return omit_one_operand (type, integer_zero_node, arg0);
6712 else if (below)
6713 return omit_one_operand (type, integer_one_node, arg0);
6714
6715 default:
6716 break;
6717 }
6718
6719 return NULL_TREE;
6720}
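/* Worked example (editorial, illustrative only): with c of type
   signed char widened to int, (int) c < 300 lies above the range
   [-128, 127] of the shorter type, so it folds to constant true,
   while (int) c == 300 folds to constant false.  */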
6721
6722/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6723   conversion on ARG0 changes only the signedness.  */
6724
6725static tree
6726fold_sign_changed_comparison (enum tree_code code, tree type,
6727 tree arg0, tree arg1)
6728{
6729 tree arg0_inner, tmp;
6730 tree inner_type, outer_type;
6731
6732 if (TREE_CODE (arg0) != NOP_EXPR
6733 && TREE_CODE (arg0) != CONVERT_EXPR)
6734 return NULL_TREE;
6735
6736 outer_type = TREE_TYPE (arg0);
6737 arg0_inner = TREE_OPERAND (arg0, 0);
6738 inner_type = TREE_TYPE (arg0_inner);
6739
6740#ifdef HAVE_canonicalize_funcptr_for_compare
6741 /* Disable this optimization if we're casting a function pointer
6742 type on targets that require function pointer canonicalization. */
6743 if (HAVE_canonicalize_funcptr_for_compare
6744 && TREE_CODE (inner_type) == POINTER_TYPE
6745 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6746 return NULL_TREE;
6747#endif
6748
6749 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6750 return NULL_TREE;
6751
6752 if (TREE_CODE (arg1) != INTEGER_CST
6753 && !((TREE_CODE (arg1) == NOP_EXPR
6754 || TREE_CODE (arg1) == CONVERT_EXPR)
6755 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6756 return NULL_TREE;
6757
6758 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6759 && code != NE_EXPR
6760 && code != EQ_EXPR)
6761 return NULL_TREE;
6762
6763 if (TREE_CODE (arg1) == INTEGER_CST)
6764 {
6765 tmp = build_int_cst_wide (inner_type,
6766 TREE_INT_CST_LOW (arg1),
6767 TREE_INT_CST_HIGH (arg1));
6768 arg1 = force_fit_type (tmp, 0,
6769 TREE_OVERFLOW (arg1),
6770 TREE_CONSTANT_OVERFLOW (arg1));
6771 }
6772 else
6773 arg1 = fold_convert (inner_type, arg1);
6774
6775 return fold_build2 (code, type, arg0_inner, arg1);
6776}
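/* Worked example (editorial, illustrative only): for unsigned int u,
   (int) u == -1 drops the cast and becomes u == 0xffffffff; the
   constant is refitted into the inner type, which is valid for
   equality even though the signedness differs.  */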
6777
6778/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6779   the step of the array.  Reconstructs s and delta in the case of s * delta
6780 being an integer constant (and thus already folded).
6781   ADDR is the address.  OP1 is the multiplicative expression.
6782 If the function succeeds, the new address expression is returned. Otherwise
6783 NULL_TREE is returned. */
6784
6785static tree
6786try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6787{
6788 tree s, delta, step;
6789 tree ref = TREE_OPERAND (addr, 0), pref;
6790 tree ret, pos;
6791 tree itype;
6792
6793 /* Canonicalize op1 into a possibly non-constant delta
6794 and an INTEGER_CST s. */
6795 if (TREE_CODE (op1) == MULT_EXPR)
6796 {
6797 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6798
6799 STRIP_NOPS (arg0);
6800 STRIP_NOPS (arg1);
6801
6802 if (TREE_CODE (arg0) == INTEGER_CST)
6803 {
6804 s = arg0;
6805 delta = arg1;
6806 }
6807 else if (TREE_CODE (arg1) == INTEGER_CST)
6808 {
6809 s = arg1;
6810 delta = arg0;
6811 }
6812 else
6813 return NULL_TREE;
6814 }
6815 else if (TREE_CODE (op1) == INTEGER_CST)
6816 {
6817 delta = op1;
6818 s = NULL_TREE;
6819 }
6820 else
6821 {
6822      /* Treat op1 as delta * 1.  */
6823 delta = op1;
6824 s = integer_one_node;
6825 }
6826
6827 for (;; ref = TREE_OPERAND (ref, 0))
6828 {
6829 if (TREE_CODE (ref) == ARRAY_REF)
6830 {
6831 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6832 if (! itype)
6833 continue;
6834
6835 step = array_ref_element_size (ref);
6836 if (TREE_CODE (step) != INTEGER_CST)
6837 continue;
6838
6839 if (s)
6840 {
6841 if (! tree_int_cst_equal (step, s))
6842 continue;
6843 }
6844 else
6845 {
6846	      /* Check whether delta is a multiple of step.  */
6847 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6848 if (! tmp)
6849 continue;
6850 delta = tmp;
6851 }
6852
6853 break;
6854 }
6855
6856 if (!handled_component_p (ref))
6857 return NULL_TREE;
6858 }
6859
6860  /* We have found a suitable array reference.  Copy everything up to it,
6861 and replace the index. */
6862
6863 pref = TREE_OPERAND (addr, 0);
6864 ret = copy_node (pref);
6865 pos = ret;
6866
6867 while (pref != ref)
6868 {
6869 pref = TREE_OPERAND (pref, 0);
6870 TREE_OPERAND (pos, 0) = copy_node (pref);
6871 pos = TREE_OPERAND (pos, 0);
6872 }
6873
6874 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6875 fold_convert (itype,
6876 TREE_OPERAND (pos, 1)),
6877 fold_convert (itype, delta));
6878
6879 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6880}
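/* Worked example (editorial, illustrative only): if a is an array of
   4-byte elements, then in &a[i] + delta * 4 the INTEGER_CST s = 4
   matches the array step, so the address folds to &a[i + delta];
   a bare constant such as &a[i] + 8 likewise folds to &a[i + 2].  */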
6881
6882
6883/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6884 means A >= Y && A != MAX, but in this case we know that
6885 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6886
6887static tree
6888fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6889{
6890 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6891
6892 if (TREE_CODE (bound) == LT_EXPR)
6893 a = TREE_OPERAND (bound, 0);
6894 else if (TREE_CODE (bound) == GT_EXPR)
6895 a = TREE_OPERAND (bound, 1);
6896 else
6897 return NULL_TREE;
6898
6899 typea = TREE_TYPE (a);
6900 if (!INTEGRAL_TYPE_P (typea)
6901 && !POINTER_TYPE_P (typea))
6902 return NULL_TREE;
6903
6904 if (TREE_CODE (ineq) == LT_EXPR)
6905 {
6906 a1 = TREE_OPERAND (ineq, 1);
6907 y = TREE_OPERAND (ineq, 0);
6908 }
6909 else if (TREE_CODE (ineq) == GT_EXPR)
6910 {
6911 a1 = TREE_OPERAND (ineq, 0);
6912 y = TREE_OPERAND (ineq, 1);
6913 }
6914 else
6915 return NULL_TREE;
6916
6917 if (TREE_TYPE (a1) != typea)
6918 return NULL_TREE;
6919
6920 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6921 if (!integer_onep (diff))
6922 return NULL_TREE;
6923
6924 return fold_build2 (GE_EXPR, type, a, y);
6925}
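/* Worked example (editorial, illustrative only): given BOUND a < x
   and INEQ y < a + 1, the difference (a + 1) - a folds to 1, so the
   inequality is rewritten as a >= y; the MAX corner case is already
   ruled out by the bound a < x.  */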
6926
6927/* Fold a sum or difference in which at least one operand is a multiplication.
6928 Returns the folded tree or NULL if no simplification could be made. */
6929
6930static tree
6931fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6932{
6933 tree arg00, arg01, arg10, arg11;
6934 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6935
6936 /* (A * C) +- (B * C) -> (A+-B) * C.
6937 (A * C) +- A -> A * (C+-1).
6938 We are most concerned about the case where C is a constant,
6939 but other combinations show up during loop reduction. Since
6940 it is not difficult, try all four possibilities. */
6941
6942 if (TREE_CODE (arg0) == MULT_EXPR)
6943 {
6944 arg00 = TREE_OPERAND (arg0, 0);
6945 arg01 = TREE_OPERAND (arg0, 1);
6946 }
6947 else
6948 {
6949 arg00 = arg0;
6950 arg01 = build_one_cst (type);
6951 }
6952 if (TREE_CODE (arg1) == MULT_EXPR)
6953 {
6954 arg10 = TREE_OPERAND (arg1, 0);
6955 arg11 = TREE_OPERAND (arg1, 1);
6956 }
6957 else
6958 {
6959 arg10 = arg1;
6960 arg11 = build_one_cst (type);
6961 }
6962 same = NULL_TREE;
6963
6964 if (operand_equal_p (arg01, arg11, 0))
6965 same = arg01, alt0 = arg00, alt1 = arg10;
6966 else if (operand_equal_p (arg00, arg10, 0))
6967 same = arg00, alt0 = arg01, alt1 = arg11;
6968 else if (operand_equal_p (arg00, arg11, 0))
6969 same = arg00, alt0 = arg01, alt1 = arg10;
6970 else if (operand_equal_p (arg01, arg10, 0))
6971 same = arg01, alt0 = arg00, alt1 = arg11;
6972
6973 /* No identical multiplicands; see if we can find a common
6974 power-of-two factor in non-power-of-two multiplies. This
6975 can help in multi-dimensional array access. */
6976 else if (host_integerp (arg01, 0)
6977 && host_integerp (arg11, 0))
6978 {
6979 HOST_WIDE_INT int01, int11, tmp;
6980 bool swap = false;
6981 tree maybe_same;
6982 int01 = TREE_INT_CST_LOW (arg01);
6983 int11 = TREE_INT_CST_LOW (arg11);
6984
6985 /* Move min of absolute values to int11. */
6986 if ((int01 >= 0 ? int01 : -int01)
6987 < (int11 >= 0 ? int11 : -int11))
6988 {
6989 tmp = int01, int01 = int11, int11 = tmp;
6990 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6991 maybe_same = arg01;
6992 swap = true;
6993 }
6994 else
6995 maybe_same = arg11;
6996
6997 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6998 {
6999 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7000 build_int_cst (TREE_TYPE (arg00),
7001 int01 / int11));
7002 alt1 = arg10;
7003 same = maybe_same;
7004 if (swap)
7005 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7006 }
7007 }
7008
7009 if (same)
7010 return fold_build2 (MULT_EXPR, type,
7011 fold_build2 (code, type,
7012 fold_convert (type, alt0),
7013 fold_convert (type, alt1)),
7014 fold_convert (type, same));
7015
7016 return NULL_TREE;
7017}
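/* Worked examples (editorial, illustrative only): x*8 + x*4 has the
   common multiplicand x and folds to x * 12, while i*12 + j*4 shares
   only the power-of-two factor 4 and folds to (i*3 + j) * 4, a shape
   that commonly arises from multi-dimensional array indexing.  */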
7018
7019/* Subroutine of native_encode_expr. Encode the INTEGER_CST
7020 specified by EXPR into the buffer PTR of length LEN bytes.
7021 Return the number of bytes placed in the buffer, or zero
7022 upon failure. */
7023
7024static int
7025native_encode_int (tree expr, unsigned char *ptr, int len)
7026{
7027 tree type = TREE_TYPE (expr);
7028 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 int byte, offset, word, words;
7030 unsigned char value;
7031
7032 if (total_bytes > len)
7033 return 0;
7034 words = total_bytes / UNITS_PER_WORD;
7035
7036 for (byte = 0; byte < total_bytes; byte++)
7037 {
7038 int bitpos = byte * BITS_PER_UNIT;
7039 if (bitpos < HOST_BITS_PER_WIDE_INT)
7040 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7041 else
7042 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7043 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7044
7045 if (total_bytes > UNITS_PER_WORD)
7046 {
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7055 }
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 ptr[offset] = value;
7059 }
7060 return total_bytes;
7061}
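/* Worked example (editorial, illustrative only): on a little-endian
   target with 8-bit units, encoding the 32-bit constant 0x01020304
   stores the bytes 04 03 02 01 into PTR; on a big-endian target the
   stored order is 01 02 03 04.  */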
7062
7063
7064/* Subroutine of native_encode_expr. Encode the REAL_CST
7065 specified by EXPR into the buffer PTR of length LEN bytes.
7066 Return the number of bytes placed in the buffer, or zero
7067 upon failure. */
7068
7069static int
7070native_encode_real (tree expr, unsigned char *ptr, int len)
7071{
7072 tree type = TREE_TYPE (expr);
7073 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7074 int byte, offset, word, words, bitpos;
7075 unsigned char value;
7076
7077 /* There are always 32 bits in each long, no matter the size of
7078     the host's long.  We handle floating point representations with
7079 up to 192 bits. */
7080 long tmp[6];
7081
7082 if (total_bytes > len)
7083 return 0;
7084  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7085
7086 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7087
7088 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7089 bitpos += BITS_PER_UNIT)
7090 {
7091 byte = (bitpos / BITS_PER_UNIT) & 3;
7092 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7093
7094 if (UNITS_PER_WORD < 4)
7095 {
7096 word = byte / UNITS_PER_WORD;
7097 if (WORDS_BIG_ENDIAN)
7098 word = (words - 1) - word;
7099 offset = word * UNITS_PER_WORD;
7100 if (BYTES_BIG_ENDIAN)
7101 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7102 else
7103 offset += byte % UNITS_PER_WORD;
7104 }
7105 else
7106 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7107 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7108 }
7109 return total_bytes;
7110}
7111
7112/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7113 specified by EXPR into the buffer PTR of length LEN bytes.
7114 Return the number of bytes placed in the buffer, or zero
7115 upon failure. */
7116
7117static int
7118native_encode_complex (tree expr, unsigned char *ptr, int len)
7119{
7120 int rsize, isize;
7121 tree part;
7122
7123 part = TREE_REALPART (expr);
7124 rsize = native_encode_expr (part, ptr, len);
7125 if (rsize == 0)
7126 return 0;
7127 part = TREE_IMAGPART (expr);
7128 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7129 if (isize != rsize)
7130 return 0;
7131 return rsize + isize;
7132}
7133
7134
7135/* Subroutine of native_encode_expr. Encode the VECTOR_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7139
7140static int
7141native_encode_vector (tree expr, unsigned char *ptr, int len)
7142{
7143 int i, size, offset, count;
7144 tree itype, elem, elements;
7145
7146 offset = 0;
7147 elements = TREE_VECTOR_CST_ELTS (expr);
7148 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7149 itype = TREE_TYPE (TREE_TYPE (expr));
7150 size = GET_MODE_SIZE (TYPE_MODE (itype));
7151 for (i = 0; i < count; i++)
7152 {
7153 if (elements)
7154 {
7155 elem = TREE_VALUE (elements);
7156 elements = TREE_CHAIN (elements);
7157 }
7158 else
7159 elem = NULL_TREE;
7160
7161 if (elem)
7162 {
7163 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7164 return 0;
7165 }
7166 else
7167 {
7168 if (offset + size > len)
7169 return 0;
7170 memset (ptr+offset, 0, size);
7171 }
7172 offset += size;
7173 }
7174 return offset;
7175}
7176
7177
7178/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7179 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7180 buffer PTR of length LEN bytes. Return the number of bytes
7181 placed in the buffer, or zero upon failure. */
7182
7183static int
7184native_encode_expr (tree expr, unsigned char *ptr, int len)
7185{
7186 switch (TREE_CODE (expr))
7187 {
7188 case INTEGER_CST:
7189 return native_encode_int (expr, ptr, len);
7190
7191 case REAL_CST:
7192 return native_encode_real (expr, ptr, len);
7193
7194 case COMPLEX_CST:
7195 return native_encode_complex (expr, ptr, len);
7196
7197 case VECTOR_CST:
7198 return native_encode_vector (expr, ptr, len);
7199
7200 default:
7201 return 0;
7202 }
7203}
7204
7205
7206/* Subroutine of native_interpret_expr. Interpret the contents of
7207 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7208 If the buffer cannot be interpreted, return NULL_TREE. */
7209
7210static tree
7211native_interpret_int (tree type, unsigned char *ptr, int len)
7212{
7213 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7214 int byte, offset, word, words;
7215 unsigned char value;
7216  unsigned HOST_WIDE_INT lo = 0;
7217 HOST_WIDE_INT hi = 0;
7218
7219 if (total_bytes > len)
7220 return NULL_TREE;
7221 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7222 return NULL_TREE;
7223 words = total_bytes / UNITS_PER_WORD;
7224
7225 for (byte = 0; byte < total_bytes; byte++)
7226 {
7227 int bitpos = byte * BITS_PER_UNIT;
7228 if (total_bytes > UNITS_PER_WORD)
7229 {
7230 word = byte / UNITS_PER_WORD;
7231 if (WORDS_BIG_ENDIAN)
7232 word = (words - 1) - word;
7233 offset = word * UNITS_PER_WORD;
7234 if (BYTES_BIG_ENDIAN)
7235 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7236 else
7237 offset += byte % UNITS_PER_WORD;
7238 }
7239 else
7240 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7241 value = ptr[offset];
7242
7243 if (bitpos < HOST_BITS_PER_WIDE_INT)
7244 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7245 else
7246 hi |= (unsigned HOST_WIDE_INT) value
7247 << (bitpos - HOST_BITS_PER_WIDE_INT);
7248 }
7249
7250 return force_fit_type (build_int_cst_wide (type, lo, hi),
7251 0, false, false);
7252}
7253
7254
7255/* Subroutine of native_interpret_expr. Interpret the contents of
7256 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7257 If the buffer cannot be interpreted, return NULL_TREE. */
7258
7259static tree
7260native_interpret_real (tree type, unsigned char *ptr, int len)
7261{
7262 enum machine_mode mode = TYPE_MODE (type);
7263 int total_bytes = GET_MODE_SIZE (mode);
7264 int byte, offset, word, words, bitpos;
7265 unsigned char value;
7266 /* There are always 32 bits in each long, no matter the size of
7267     the host's long.  We handle floating point representations with
7268 up to 192 bits. */
7269 REAL_VALUE_TYPE r;
7270 long tmp[6];
7271
7273 if (total_bytes > len || total_bytes > 24)
7274 return NULL_TREE;
7275  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7276
7277 memset (tmp, 0, sizeof (tmp));
7278 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7279 bitpos += BITS_PER_UNIT)
7280 {
7281 byte = (bitpos / BITS_PER_UNIT) & 3;
7282 if (UNITS_PER_WORD < 4)
7283 {
7284 word = byte / UNITS_PER_WORD;
7285 if (WORDS_BIG_ENDIAN)
7286 word = (words - 1) - word;
7287 offset = word * UNITS_PER_WORD;
7288 if (BYTES_BIG_ENDIAN)
7289 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7290 else
7291 offset += byte % UNITS_PER_WORD;
7292 }
7293 else
7294 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7295 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7296
7297 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7298 }
7299
7300 real_from_target (&r, tmp, mode);
7301 return build_real (type, r);
7302}
7303
7304
7305/* Subroutine of native_interpret_expr. Interpret the contents of
7306 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7307 If the buffer cannot be interpreted, return NULL_TREE. */
7308
7309static tree
7310native_interpret_complex (tree type, unsigned char *ptr, int len)
7311{
7312 tree etype, rpart, ipart;
7313 int size;
7314
7315 etype = TREE_TYPE (type);
7316 size = GET_MODE_SIZE (TYPE_MODE (etype));
7317 if (size * 2 > len)
7318 return NULL_TREE;
7319 rpart = native_interpret_expr (etype, ptr, size);
7320 if (!rpart)
7321 return NULL_TREE;
7322 ipart = native_interpret_expr (etype, ptr+size, size);
7323 if (!ipart)
7324 return NULL_TREE;
7325 return build_complex (type, rpart, ipart);
7326}
7327
7328
7329/* Subroutine of native_interpret_expr. Interpret the contents of
7330 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7331 If the buffer cannot be interpreted, return NULL_TREE. */
7332
7333static tree
7334native_interpret_vector (tree type, unsigned char *ptr, int len)
7335{
7336 tree etype, elem, elements;
7337 int i, size, count;
7338
7339 etype = TREE_TYPE (type);
7340 size = GET_MODE_SIZE (TYPE_MODE (etype));
7341 count = TYPE_VECTOR_SUBPARTS (type);
7342 if (size * count > len)
7343 return NULL_TREE;
7344
7345 elements = NULL_TREE;
7346 for (i = count - 1; i >= 0; i--)
7347 {
7348 elem = native_interpret_expr (etype, ptr+(i*size), size);
7349 if (!elem)
7350 return NULL_TREE;
7351 elements = tree_cons (NULL_TREE, elem, elements);
7352 }
7353 return build_vector (type, elements);
7354}
7355
7356
7357/* Subroutine of fold_view_convert_expr. Interpret the contents of
7358 the buffer PTR of length LEN as a constant of type TYPE. For
7359 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7360 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7361 return NULL_TREE. */
7362
7363static tree
7364native_interpret_expr (tree type, unsigned char *ptr, int len)
7365{
7366 switch (TREE_CODE (type))
7367 {
7368 case INTEGER_TYPE:
7369 case ENUMERAL_TYPE:
7370 case BOOLEAN_TYPE:
7371 return native_interpret_int (type, ptr, len);
7372
7373 case REAL_TYPE:
7374 return native_interpret_real (type, ptr, len);
7375
7376 case COMPLEX_TYPE:
7377 return native_interpret_complex (type, ptr, len);
7378
7379 case VECTOR_TYPE:
7380 return native_interpret_vector (type, ptr, len);
7381
7382 default:
7383 return NULL_TREE;
7384 }
7385}
7386
7387
7388/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7389 TYPE at compile-time. If we're unable to perform the conversion
7390 return NULL_TREE. */
7391
7392static tree
7393fold_view_convert_expr (tree type, tree expr)
7394{
7395 /* We support up to 512-bit values (for V8DFmode). */
7396 unsigned char buffer[64];
7397 int len;
7398
7399 /* Check that the host and target are sane. */
7400 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7401 return NULL_TREE;
7402
7403 len = native_encode_expr (expr, buffer, sizeof (buffer));
7404 if (len == 0)
7405 return NULL_TREE;
7406
7407 return native_interpret_expr (type, buffer, len);
7408}
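/* Worked example (editorial, illustrative only): on a target using
   IEEE single-precision floats, VIEW_CONVERT_EXPR<int>(1.0f) is
   folded at compile time by encoding 1.0f into the buffer and
   reinterpreting its bytes, yielding the constant 0x3f800000.  */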
7409
7410
7411/* Fold a unary expression of code CODE and type TYPE with operand
7412 OP0. Return the folded expression if folding is successful.
7413 Otherwise, return NULL_TREE. */
7414
7415tree
7416fold_unary (enum tree_code code, tree type, tree op0)
7417{
7418 tree tem;
7419 tree arg0;
7420 enum tree_code_class kind = TREE_CODE_CLASS (code);
7421
7422 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7423 && TREE_CODE_LENGTH (code) == 1);
7424
7425 arg0 = op0;
7426 if (arg0)
7427 {
7428 if (code == NOP_EXPR || code == CONVERT_EXPR
7429 || code == FLOAT_EXPR || code == ABS_EXPR)
7430 {
7431 /* Don't use STRIP_NOPS, because signedness of argument type
7432 matters. */
7433 STRIP_SIGN_NOPS (arg0);
7434 }
7435 else
7436 {
7437 /* Strip any conversions that don't change the mode. This
7438 is safe for every expression, except for a comparison
7439 expression because its signedness is derived from its
7440 operands.
7441
7442 Note that this is done as an internal manipulation within
7443 the constant folder, in order to find the simplest
7444 representation of the arguments so that their form can be
7445	     studied.  In any case, the appropriate type conversions
7446 should be put back in the tree that will get out of the
7447 constant folder. */
7448 STRIP_NOPS (arg0);
7449 }
7450 }
7451
7452 if (TREE_CODE_CLASS (code) == tcc_unary)
7453 {
7454 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7455 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7456 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7457 else if (TREE_CODE (arg0) == COND_EXPR)
7458 {
7459 tree arg01 = TREE_OPERAND (arg0, 1);
7460 tree arg02 = TREE_OPERAND (arg0, 2);
7461 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7462 arg01 = fold_build1 (code, type, arg01);
7463 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7464 arg02 = fold_build1 (code, type, arg02);
7465 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7466 arg01, arg02);
7467
7468	  /* If this was a conversion, and all we did was to move it
7469 inside the COND_EXPR, bring it back out. But leave it if
7470 it is a conversion from integer to integer and the
7471 result precision is no wider than a word since such a
7472 conversion is cheap and may be optimized away by combine,
7473 while it couldn't if it were outside the COND_EXPR. Then return
7474 so we don't get into an infinite recursion loop taking the
7475 conversion out and then back in. */
7476
7477 if ((code == NOP_EXPR || code == CONVERT_EXPR
7478 || code == NON_LVALUE_EXPR)
7479 && TREE_CODE (tem) == COND_EXPR
7480 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7481 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7482 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7483 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7484 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7485 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7486 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7487 && (INTEGRAL_TYPE_P
7488 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7489 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7490 || flag_syntax_only))
7491 tem = build1 (code, type,
7492 build3 (COND_EXPR,
7493 TREE_TYPE (TREE_OPERAND
7494 (TREE_OPERAND (tem, 1), 0)),
7495 TREE_OPERAND (tem, 0),
7496 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7497 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7498 return tem;
7499 }
7500 else if (COMPARISON_CLASS_P (arg0))
7501 {
7502 if (TREE_CODE (type) == BOOLEAN_TYPE)
7503 {
7504 arg0 = copy_node (arg0);
7505 TREE_TYPE (arg0) = type;
7506 return arg0;
7507 }
7508 else if (TREE_CODE (type) != INTEGER_TYPE)
7509 return fold_build3 (COND_EXPR, type, arg0,
7510 fold_build1 (code, type,
7511 integer_one_node),
7512 fold_build1 (code, type,
7513 integer_zero_node));
7514 }
7515 }
7516
7517 switch (code)
7518 {
7519 case NOP_EXPR:
7520 case FLOAT_EXPR:
7521 case CONVERT_EXPR:
7522 case FIX_TRUNC_EXPR:
7523 case FIX_CEIL_EXPR:
7524 case FIX_FLOOR_EXPR:
7525 case FIX_ROUND_EXPR:
7526 if (TREE_TYPE (op0) == type)
7527 return op0;
7528
7529      /* If we have (type) (a CMP b) and type is an integral type, return a
7530 new expression involving the new type. */
7531 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7532 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7533 TREE_OPERAND (op0, 1));
7534
7535 /* Handle cases of two conversions in a row. */
7536 if (TREE_CODE (op0) == NOP_EXPR
7537 || TREE_CODE (op0) == CONVERT_EXPR)
7538 {
7539 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7540 tree inter_type = TREE_TYPE (op0);
7541 int inside_int = INTEGRAL_TYPE_P (inside_type);
7542 int inside_ptr = POINTER_TYPE_P (inside_type);
7543 int inside_float = FLOAT_TYPE_P (inside_type);
7544 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7545 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7546 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7547 int inter_int = INTEGRAL_TYPE_P (inter_type);
7548 int inter_ptr = POINTER_TYPE_P (inter_type);
7549 int inter_float = FLOAT_TYPE_P (inter_type);
7550 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7551 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7552 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7553 int final_int = INTEGRAL_TYPE_P (type);
7554 int final_ptr = POINTER_TYPE_P (type);
7555 int final_float = FLOAT_TYPE_P (type);
7556 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7557 unsigned int final_prec = TYPE_PRECISION (type);
7558 int final_unsignedp = TYPE_UNSIGNED (type);
7559
7560 /* In addition to the cases of two conversions in a row
7561 handled below, if we are converting something to its own
7562 type via an object of identical or wider precision, neither
7563 conversion is needed. */
7564 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7565 && (((inter_int || inter_ptr) && final_int)
7566 || (inter_float && final_float))
7567 && inter_prec >= final_prec)
7568 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7569
7570 /* Likewise, if the intermediate and final types are either both
7571 float or both integer, we don't need the middle conversion if
7572 it is wider than the final type and doesn't change the signedness
7573 (for integers). Avoid this if the final type is a pointer
7574 since then we sometimes need the inner conversion. Likewise if
7575 the outer has a precision not equal to the size of its mode. */
7576 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7577 || (inter_float && inside_float)
7578 || (inter_vec && inside_vec))
7579 && inter_prec >= inside_prec
7580 && (inter_float || inter_vec
7581 || inter_unsignedp == inside_unsignedp)
7582 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7583 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7584 && ! final_ptr
7585 && (! final_vec || inter_prec == inside_prec))
7586 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7587
7588 /* If we have a sign-extension of a zero-extended value, we can
7589 replace that by a single zero-extension. */
7590 if (inside_int && inter_int && final_int
7591 && inside_prec < inter_prec && inter_prec < final_prec
7592 && inside_unsignedp && !inter_unsignedp)
7593 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7594
7595 /* Two conversions in a row are not needed unless:
7596 - some conversion is floating-point (overstrict for now), or
7597 - some conversion is a vector (overstrict for now), or
7598 - the intermediate type is narrower than both initial and
7599 final, or
7600 - the intermediate type and innermost type differ in signedness,
7601 and the outermost type is wider than the intermediate, or
7602 - the initial type is a pointer type and the precisions of the
7603 intermediate and final types differ, or
7604 - the final type is a pointer type and the precisions of the
7605	   initial and intermediate types differ, or
7606	 - the final type is a pointer type and the initial type is not, or
7607	 - the initial type is a pointer to an array and the final type
7608	   is not.  */
7609 /* Java pointer type conversions generate checks in some
7610 cases, so we explicitly disallow this optimization. */
7611 if (! inside_float && ! inter_float && ! final_float
7612 && ! inside_vec && ! inter_vec && ! final_vec
7613 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7614 && ! (inside_int && inter_int
7615 && inter_unsignedp != inside_unsignedp
7616 && inter_prec < final_prec)
7617 && ((inter_unsignedp && inter_prec > inside_prec)
7618 == (final_unsignedp && final_prec > inter_prec))
7619 && ! (inside_ptr && inter_prec != final_prec)
7620 && ! (final_ptr && inside_prec != inter_prec)
7621 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7622 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7623 && final_ptr == inside_ptr
7624 && ! (inside_ptr
7625 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7626 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7627 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7628 && final_ptr))
7629 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7630 }
7631
7632 /* Handle (T *)&A.B.C for A being of type T and B and C
7633 living at offset zero. This occurs frequently in
7634 C++ upcasting and then accessing the base. */
7635 if (TREE_CODE (op0) == ADDR_EXPR
7636 && POINTER_TYPE_P (type)
7637 && handled_component_p (TREE_OPERAND (op0, 0)))
7638 {
7639 HOST_WIDE_INT bitsize, bitpos;
7640 tree offset;
7641 enum machine_mode mode;
7642 int unsignedp, volatilep;
7643 tree base = TREE_OPERAND (op0, 0);
7644 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7645 &mode, &unsignedp, &volatilep, false);
7646 /* If the reference was to a (constant) zero offset, we can use
7647 the address of the base if it has the same base type
7648 as the result type. */
7649 if (! offset && bitpos == 0
7650 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7651 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7652 return fold_convert (type, build_fold_addr_expr (base));
7653 }
7654
7655 if (TREE_CODE (op0) == MODIFY_EXPR
7656 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7657 /* Detect assigning a bitfield. */
7658 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7659 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7660 {
7661 /* Don't leave an assignment inside a conversion
7662 unless assigning a bitfield. */
7663 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7664 /* First do the assignment, then return converted constant. */
7665 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7666 TREE_NO_WARNING (tem) = 1;
7667 TREE_USED (tem) = 1;
7668 return tem;
7669 }
7670
7671 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7672	 constant (if x has signed type, the sign bit cannot be set
7673 in c). This folds extension into the BIT_AND_EXPR. */
7674 if (INTEGRAL_TYPE_P (type)
7675 && TREE_CODE (type) != BOOLEAN_TYPE
7676 && TREE_CODE (op0) == BIT_AND_EXPR
7677 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7678 {
7679 tree and = op0;
7680 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7681 int change = 0;
7682
7683 if (TYPE_UNSIGNED (TREE_TYPE (and))
7684 || (TYPE_PRECISION (type)
7685 <= TYPE_PRECISION (TREE_TYPE (and))))
7686 change = 1;
7687 else if (TYPE_PRECISION (TREE_TYPE (and1))
7688 <= HOST_BITS_PER_WIDE_INT
7689 && host_integerp (and1, 1))
7690 {
7691 unsigned HOST_WIDE_INT cst;
7692
7693 cst = tree_low_cst (and1, 1);
7694 cst &= (HOST_WIDE_INT) -1
7695 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7696 change = (cst == 0);
7697#ifdef LOAD_EXTEND_OP
7698 if (change
7699 && !flag_syntax_only
7700 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7701 == ZERO_EXTEND))
7702 {
7703 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7704 and0 = fold_convert (uns, and0);
7705 and1 = fold_convert (uns, and1);
7706 }
7707#endif
7708 }
7709 if (change)
7710 {
7711 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7712 TREE_INT_CST_HIGH (and1));
7713 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7714 TREE_CONSTANT_OVERFLOW (and1));
7715 return fold_build2 (BIT_AND_EXPR, type,
7716 fold_convert (type, and0), tem);
7717 }
7718 }
7719
7720 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7721 T2 being pointers to types of the same size. */
7722 if (POINTER_TYPE_P (type)
7723 && BINARY_CLASS_P (arg0)
7724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7725 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7726 {
7727 tree arg00 = TREE_OPERAND (arg0, 0);
7728 tree t0 = type;
7729 tree t1 = TREE_TYPE (arg00);
7730 tree tt0 = TREE_TYPE (t0);
7731 tree tt1 = TREE_TYPE (t1);
7732 tree s0 = TYPE_SIZE (tt0);
7733 tree s1 = TYPE_SIZE (tt1);
7734
7735 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7736 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7737 TREE_OPERAND (arg0, 1));
7738 }
7739
7740 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7741	 of the same precision, and X has an integer type not narrower than
7742 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7743 if (INTEGRAL_TYPE_P (type)
7744 && TREE_CODE (op0) == BIT_NOT_EXPR
7745 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7746 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7747 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7748 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7749 {
7750 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7751 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7752 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7753 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7754 }
7755
7756 tem = fold_convert_const (code, type, op0);
7757 return tem ? tem : NULL_TREE;
7758
7759 case VIEW_CONVERT_EXPR:
7760 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7761 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7762 return fold_view_convert_expr (type, op0);
7763
7764 case NEGATE_EXPR:
7765 tem = fold_negate_expr (arg0);
7766 if (tem)
7767 return fold_convert (type, tem);
7768 return NULL_TREE;
7769
7770 case ABS_EXPR:
7771 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7772 return fold_abs_const (arg0, type);
7773 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7774 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7775 /* Convert fabs((double)float) into (double)fabsf(float). */
7776 else if (TREE_CODE (arg0) == NOP_EXPR
7777 && TREE_CODE (type) == REAL_TYPE)
7778 {
7779 tree targ0 = strip_float_extensions (arg0);
7780 if (targ0 != arg0)
7781 return fold_convert (type, fold_build1 (ABS_EXPR,
7782 TREE_TYPE (targ0),
7783 targ0));
7784 }
7785 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7786 else if (TREE_CODE (arg0) == ABS_EXPR)
7787 return arg0;
7788 else if (tree_expr_nonnegative_p (arg0))
7789 return arg0;
7790
7791 /* Strip sign ops from argument. */
7792 if (TREE_CODE (type) == REAL_TYPE)
7793 {
7794 tem = fold_strip_sign_ops (arg0);
7795 if (tem)
7796 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7797 }
7798 return NULL_TREE;
7799
7800 case CONJ_EXPR:
7801 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7802 return fold_convert (type, arg0);
7803 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7804 {
7805 tree itype = TREE_TYPE (type);
7806 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7807 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7808 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7809 }
7810 if (TREE_CODE (arg0) == COMPLEX_CST)
7811 {
7812 tree itype = TREE_TYPE (type);
7813 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7814 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7815 return build_complex (type, rpart, negate_expr (ipart));
7816 }
7817 if (TREE_CODE (arg0) == CONJ_EXPR)
7818 return fold_convert (type, TREE_OPERAND (arg0, 0));
7819 return NULL_TREE;
7820
7821 case BIT_NOT_EXPR:
7822 if (TREE_CODE (arg0) == INTEGER_CST)
7823 return fold_not_const (arg0, type);
7824 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7825 return TREE_OPERAND (arg0, 0);
7826 /* Convert ~ (-A) to A - 1. */
7827 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7828 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7829 build_int_cst (type, 1));
7830 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7831 else if (INTEGRAL_TYPE_P (type)
7832 && ((TREE_CODE (arg0) == MINUS_EXPR
7833 && integer_onep (TREE_OPERAND (arg0, 1)))
7834 || (TREE_CODE (arg0) == PLUS_EXPR
7835 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7836 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7837 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7838 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7839 && (tem = fold_unary (BIT_NOT_EXPR, type,
7840 fold_convert (type,
7841 TREE_OPERAND (arg0, 0)))))
7842 return fold_build2 (BIT_XOR_EXPR, type, tem,
7843 fold_convert (type, TREE_OPERAND (arg0, 1)));
7844 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7845 && (tem = fold_unary (BIT_NOT_EXPR, type,
7846 fold_convert (type,
7847 TREE_OPERAND (arg0, 1)))))
7848 return fold_build2 (BIT_XOR_EXPR, type,
7849 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7850
7851 return NULL_TREE;
7852
7853 case TRUTH_NOT_EXPR:
7854 /* The argument to invert_truthvalue must have Boolean type. */
7855 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7856 arg0 = fold_convert (boolean_type_node, arg0);
7857
7858 /* Note that the operand of this must be an int
7859 and its values must be 0 or 1.
7860 ("true" is a fixed value perhaps depending on the language,
7861 but we don't handle values other than 1 correctly yet.) */
7862 tem = fold_truth_not_expr (arg0);
7863 if (!tem)
7864 return NULL_TREE;
7865 return fold_convert (type, tem);
7866
7867 case REALPART_EXPR:
7868 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7869 return fold_convert (type, arg0);
7870 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7871 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7872 TREE_OPERAND (arg0, 1));
7873 if (TREE_CODE (arg0) == COMPLEX_CST)
7874 return fold_convert (type, TREE_REALPART (arg0));
7875 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7876 {
7877 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7878 tem = fold_build2 (TREE_CODE (arg0), itype,
7879 fold_build1 (REALPART_EXPR, itype,
7880 TREE_OPERAND (arg0, 0)),
7881 fold_build1 (REALPART_EXPR, itype,
7882 TREE_OPERAND (arg0, 1)));
7883 return fold_convert (type, tem);
7884 }
7885 if (TREE_CODE (arg0) == CONJ_EXPR)
7886 {
7887 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7888 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7889 return fold_convert (type, tem);
7890 }
7891 return NULL_TREE;
7892
7893 case IMAGPART_EXPR:
7894 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7895 return fold_convert (type, integer_zero_node);
7896 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7897 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7898 TREE_OPERAND (arg0, 0));
7899 if (TREE_CODE (arg0) == COMPLEX_CST)
7900 return fold_convert (type, TREE_IMAGPART (arg0));
7901 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7902 {
7903 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7904 tem = fold_build2 (TREE_CODE (arg0), itype,
7905 fold_build1 (IMAGPART_EXPR, itype,
7906 TREE_OPERAND (arg0, 0)),
7907 fold_build1 (IMAGPART_EXPR, itype,
7908 TREE_OPERAND (arg0, 1)));
7909 return fold_convert (type, tem);
7910 }
7911 if (TREE_CODE (arg0) == CONJ_EXPR)
7912 {
7913 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7914 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7915 return fold_convert (type, negate_expr (tem));
7916 }
7917 return NULL_TREE;
7918
7919 default:
7920 return NULL_TREE;
7921 } /* switch (code) */
7922}
7923
7924/* Fold a binary expression of code CODE and type TYPE with operands
7925 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7926 Return the folded expression if folding is successful. Otherwise,
7927 return NULL_TREE. */
7928
7929static tree
7930fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7931{
7932 enum tree_code compl_code;
7933
7934 if (code == MIN_EXPR)
7935 compl_code = MAX_EXPR;
7936 else if (code == MAX_EXPR)
7937 compl_code = MIN_EXPR;
7938 else
7939 gcc_unreachable ();
7940
7941 /* MIN (MAX (a, b), b) == b. */
7942 if (TREE_CODE (op0) == compl_code
7943 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7944 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7945
7946 /* MIN (MAX (b, a), b) == b. */
7947 if (TREE_CODE (op0) == compl_code
7948 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7949 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7950 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7951
7952 /* MIN (a, MAX (a, b)) == a. */
7953 if (TREE_CODE (op1) == compl_code
7954 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7955 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7956 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7957
7958 /* MIN (a, MAX (b, a)) == a. */
7959 if (TREE_CODE (op1) == compl_code
7960 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7961 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7962 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7963
7964 return NULL_TREE;
7965}
7966
7967/* Subroutine of fold_binary. This routine performs all of the
7968 transformations that are common to the equality/inequality
7969 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7970 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7971   fold_binary should use fold_binary instead.  Fold a comparison with
7972 tree code CODE and type TYPE with operands OP0 and OP1. Return
7973 the folded comparison or NULL_TREE. */
7974
7975static tree
7976fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7977{
7978 tree arg0, arg1, tem;
7979
7980 arg0 = op0;
7981 arg1 = op1;
7982
7983 STRIP_SIGN_NOPS (arg0);
7984 STRIP_SIGN_NOPS (arg1);
7985
7986 tem = fold_relational_const (code, type, arg0, arg1);
7987 if (tem != NULL_TREE)
7988 return tem;
7989
7990 /* If one arg is a real or integer constant, put it last. */
7991 if (tree_swap_operands_p (arg0, arg1, true))
7992 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7993
7994 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7995 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7996 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7997 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7999 && (TREE_CODE (arg1) == INTEGER_CST
8000 && !TREE_OVERFLOW (arg1)))
8001 {
8002 tree const1 = TREE_OPERAND (arg0, 1);
8003 tree const2 = arg1;
8004 tree variable = TREE_OPERAND (arg0, 0);
8005 tree lhs;
8006 int lhs_add;
8007 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8008
8009 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8010 TREE_TYPE (arg1), const2, const1);
8011 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8012 && (TREE_CODE (lhs) != INTEGER_CST
8013 || !TREE_OVERFLOW (lhs)))
8014 {
8015 fold_overflow_warning (("assuming signed overflow does not occur "
8016 "when changing X +- C1 cmp C2 to "
8017 "X cmp C1 +- C2"),
8018 WARN_STRICT_OVERFLOW_COMPARISON);
8019 return fold_build2 (code, type, variable, lhs);
8020 }
8021 }
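/* For example, with signed int X where overflow is undefined:
     X + 5 < 10   is rewritten as   X < 5     (C2 - C1)
     X - 3 == 7   is rewritten as   X == 10   (C2 + C1)
   The rewrite is refused if computing the new constant itself
   overflows, as checked above.  */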
8022
8023 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8024 same object, then we can fold this to a comparison of the two offsets in
8025 signed size type. This is possible because pointer arithmetic is
8026 restricted to remain within an object and overflow on pointer
8027 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8028
8029 We check flag_wrapv directly because pointer types are unsigned,
8030 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8031 normally what we want, to avoid certain odd overflow cases, but
8032 not here. */
8033 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8034 && !flag_wrapv
8035 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8036 {
8037 tree base0, offset0, base1, offset1;
8038
8039 if (extract_array_ref (arg0, &base0, &offset0)
8040 && extract_array_ref (arg1, &base1, &offset1)
8041 && operand_equal_p (base0, base1, 0))
8042 {
8043 tree signed_size_type_node;
8044 signed_size_type_node = signed_type_for (size_type_node);
8045
8046 /* By converting to signed size type we cover middle-end pointer
8047 arithmetic which operates on unsigned pointer types of size
8048 type size and ARRAY_REF offsets which are properly sign or
8049 zero extended from their type in case it is narrower than
8050 size type. */
8051 if (offset0 == NULL_TREE)
8052 offset0 = build_int_cst (signed_size_type_node, 0);
8053 else
8054 offset0 = fold_convert (signed_size_type_node, offset0);
8055 if (offset1 == NULL_TREE)
8056 offset1 = build_int_cst (signed_size_type_node, 0);
8057 else
8058 offset1 = fold_convert (signed_size_type_node, offset1);
8059
8060 return fold_build2 (code, type, offset0, offset1);
8061 }
8062 }
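/* For example, &a[i] == &a[j] with a common base object folds to a
   comparison of the two offsets in the signed variant of size type,
   roughly (ssizetype) i == (ssizetype) j.  */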
8063
8064 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8065 {
8066 tree targ0 = strip_float_extensions (arg0);
8067 tree targ1 = strip_float_extensions (arg1);
8068 tree newtype = TREE_TYPE (targ0);
8069
8070 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8071 newtype = TREE_TYPE (targ1);
8072
8073 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8074 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8075 return fold_build2 (code, type, fold_convert (newtype, targ0),
8076 fold_convert (newtype, targ1));
8077
8078 /* (-a) CMP (-b) -> b CMP a */
8079 if (TREE_CODE (arg0) == NEGATE_EXPR
8080 && TREE_CODE (arg1) == NEGATE_EXPR)
8081 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8082 TREE_OPERAND (arg0, 0));
8083
8084 if (TREE_CODE (arg1) == REAL_CST)
8085 {
8086 REAL_VALUE_TYPE cst;
8087 cst = TREE_REAL_CST (arg1);
8088
8089 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8090 if (TREE_CODE (arg0) == NEGATE_EXPR)
8091 return fold_build2 (swap_tree_comparison (code), type,
8092 TREE_OPERAND (arg0, 0),
8093 build_real (TREE_TYPE (arg1),
8094 REAL_VALUE_NEGATE (cst)));
8095
8096 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8097 /* a CMP (-0) -> a CMP 0 */
8098 if (REAL_VALUE_MINUS_ZERO (cst))
8099 return fold_build2 (code, type, arg0,
8100 build_real (TREE_TYPE (arg1), dconst0));
8101
8102 /* x != NaN is always true, other ops are always false. */
8103 if (REAL_VALUE_ISNAN (cst)
8104 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8105 {
8106 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8107 return omit_one_operand (type, tem, arg0);
8108 }
8109
8110 /* Fold comparisons against infinity. */
8111 if (REAL_VALUE_ISINF (cst))
8112 {
8113 tem = fold_inf_compare (code, type, arg0, arg1);
8114 if (tem != NULL_TREE)
8115 return tem;
8116 }
8117 }
8118
8119 /* If this is a comparison of a real constant with a PLUS_EXPR
8120 or a MINUS_EXPR of a real constant, we can convert it into a
8121 comparison with a revised real constant, provided that
8122 unsafe_math_optimizations are enabled and no overflow occurs.
8123 if (flag_unsafe_math_optimizations
8124 && TREE_CODE (arg1) == REAL_CST
8125 && (TREE_CODE (arg0) == PLUS_EXPR
8126 || TREE_CODE (arg0) == MINUS_EXPR)
8127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8128 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8129 ? MINUS_EXPR : PLUS_EXPR,
8130 arg1, TREE_OPERAND (arg0, 1), 0))
8131 && ! TREE_CONSTANT_OVERFLOW (tem))
8132 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8133
8134 /* Likewise, we can simplify a comparison of a real constant with
8135 a MINUS_EXPR whose first operand is also a real constant, i.e.
8136 (c1 - x) < c2 becomes x > c1-c2. */
8137 if (flag_unsafe_math_optimizations
8138 && TREE_CODE (arg1) == REAL_CST
8139 && TREE_CODE (arg0) == MINUS_EXPR
8140 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8141 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8142 arg1, 0))
8143 && ! TREE_CONSTANT_OVERFLOW (tem))
8144 return fold_build2 (swap_tree_comparison (code), type,
8145 TREE_OPERAND (arg0, 1), tem);
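/* For example, under flag_unsafe_math_optimizations:
     X + 1.5 > 3.0     becomes   X > 1.5
     (10.0 - X) < 4.0  becomes   X > 6.0  (comparison swapped, as above)  */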
8146
8147 /* Fold comparisons against built-in math functions. */
8148 if (TREE_CODE (arg1) == REAL_CST
8149 && flag_unsafe_math_optimizations
8150 && ! flag_errno_math)
8151 {
8152 enum built_in_function fcode = builtin_mathfn_code (arg0);
8153
8154 if (fcode != END_BUILTINS)
8155 {
8156 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8157 if (tem != NULL_TREE)
8158 return tem;
8159 }
8160 }
8161 }
8162
8163 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8164 if (TREE_CONSTANT (arg1)
8165 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8166 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8167 /* This optimization is invalid for ordered comparisons
8168 if CONST+INCR overflows or if foo+incr might overflow.
8169 This optimization is invalid for floating point due to rounding.
8170 For pointer types we assume overflow doesn't happen. */
8171 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8172 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8173 && (code == EQ_EXPR || code == NE_EXPR))))
8174 {
8175 tree varop, newconst;
8176
8177 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8178 {
8179 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8180 arg1, TREE_OPERAND (arg0, 1));
8181 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8182 TREE_OPERAND (arg0, 0),
8183 TREE_OPERAND (arg0, 1));
8184 }
8185 else
8186 {
8187 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8188 arg1, TREE_OPERAND (arg0, 1));
8189 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8190 TREE_OPERAND (arg0, 0),
8191 TREE_OPERAND (arg0, 1));
8192 }
8193
8194
8195 /* If VAROP is a reference to a bitfield, we must mask
8196 the constant by the width of the field. */
8197 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8198 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8199 && host_integerp (DECL_SIZE (TREE_OPERAND
8200 (TREE_OPERAND (varop, 0), 1)), 1))
8201 {
8202 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8203 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8204 tree folded_compare, shift;
8205
8206 /* First check whether the comparison would come out
8207 always the same. If we don't do that we would
8208 change the meaning with the masking. */
8209 folded_compare = fold_build2 (code, type,
8210 TREE_OPERAND (varop, 0), arg1);
8211 if (TREE_CODE (folded_compare) == INTEGER_CST)
8212 return omit_one_operand (type, folded_compare, varop);
8213
8214 shift = build_int_cst (NULL_TREE,
8215 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8216 shift = fold_convert (TREE_TYPE (varop), shift);
8217 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8218 newconst, shift);
8219 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8220 newconst, shift);
8221 }
8222
8223 return fold_build2 (code, type, varop, newconst);
8224 }
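/* For example, i++ == 5 becomes ++i == 6 and j-- != 0 becomes
   --j != -1, comparing against CONST adjusted by the increment.  */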
8225
8226 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8227 && (TREE_CODE (arg0) == NOP_EXPR
8228 || TREE_CODE (arg0) == CONVERT_EXPR))
8229 {
8230 /* If we are widening one operand of an integer comparison,
8231 see if the other operand is similarly being widened. Perhaps we
8232 can do the comparison in the narrower type. */
8233 tem = fold_widened_comparison (code, type, arg0, arg1);
8234 if (tem)
8235 return tem;
8236
8237 /* Or if we are changing signedness. */
8238 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8239 if (tem)
8240 return tem;
8241 }
8242
8243 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8244 constant, we can simplify it. */
8245 if (TREE_CODE (arg1) == INTEGER_CST
8246 && (TREE_CODE (arg0) == MIN_EXPR
8247 || TREE_CODE (arg0) == MAX_EXPR)
8248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8249 {
8250 tem = optimize_minmax_comparison (code, type, op0, op1);
8251 if (tem)
8252 return tem;
8253 }
8254
8255 /* Simplify comparison of something with itself. (For IEEE
8256 floating-point, we can only do some of these simplifications.) */
8257 if (operand_equal_p (arg0, arg1, 0))
8258 {
8259 switch (code)
8260 {
8261 case EQ_EXPR:
8262 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8263 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8264 return constant_boolean_node (1, type);
8265 break;
8266
8267 case GE_EXPR:
8268 case LE_EXPR:
8269 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8270 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8271 return constant_boolean_node (1, type);
8272 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8273
8274 case NE_EXPR:
8275 /* For NE, we can only do this simplification if the type is
8276 integer or we don't honor IEEE floating point NaNs. */
8277 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8278 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8279 break;
8280 /* ... fall through ... */
8281 case GT_EXPR:
8282 case LT_EXPR:
8283 return constant_boolean_node (0, type);
8284 default:
8285 gcc_unreachable ();
8286 }
8287 }
8288
8289 /* If we are comparing an expression that just has comparisons
8290 of two integer values, arithmetic expressions of those comparisons,
8291 and constants, we can simplify it. There are only three cases
8292 to check: the two values can either be equal, the first can be
8293 greater, or the second can be greater. Fold the expression for
8294 those three values. Since each value must be 0 or 1, we have
8295 eight possibilities, each of which corresponds to the constant 0
8296 or 1 or one of the six possible comparisons.
8297
8298 This handles common cases like (a > b) == 0 but also handles
8299 expressions like ((x > y) - (y > x)) > 0, which supposedly
8300 occur in macroized code. */
8301
8302 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8303 {
8304 tree cval1 = 0, cval2 = 0;
8305 int save_p = 0;
8306
8307 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8308 /* Don't handle degenerate cases here; they should already
8309 have been handled anyway. */
8310 && cval1 != 0 && cval2 != 0
8311 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8312 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8313 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8314 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8315 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8316 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8317 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8318 {
8319 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8320 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8321
8322 /* We can't just pass T to eval_subst in case cval1 or cval2
8323 was the same as ARG1. */
8324
8325 tree high_result
8326 = fold_build2 (code, type,
8327 eval_subst (arg0, cval1, maxval,
8328 cval2, minval),
8329 arg1);
8330 tree equal_result
8331 = fold_build2 (code, type,
8332 eval_subst (arg0, cval1, maxval,
8333 cval2, maxval),
8334 arg1);
8335 tree low_result
8336 = fold_build2 (code, type,
8337 eval_subst (arg0, cval1, minval,
8338 cval2, maxval),
8339 arg1);
8340
8341 /* All three of these results should be 0 or 1. Confirm they are.
8342 Then use those values to select the proper code to use. */
8343
8344 if (TREE_CODE (high_result) == INTEGER_CST
8345 && TREE_CODE (equal_result) == INTEGER_CST
8346 && TREE_CODE (low_result) == INTEGER_CST)
8347 {
8348 /* Make a 3-bit mask with the high-order bit being the
8349 value for `>', the next for '=', and the low for '<'. */
8350 switch ((integer_onep (high_result) * 4)
8351 + (integer_onep (equal_result) * 2)
8352 + integer_onep (low_result))
8353 {
8354 case 0:
8355 /* Always false. */
8356 return omit_one_operand (type, integer_zero_node, arg0);
8357 case 1:
8358 code = LT_EXPR;
8359 break;
8360 case 2:
8361 code = EQ_EXPR;
8362 break;
8363 case 3:
8364 code = LE_EXPR;
8365 break;
8366 case 4:
8367 code = GT_EXPR;
8368 break;
8369 case 5:
8370 code = NE_EXPR;
8371 break;
8372 case 6:
8373 code = GE_EXPR;
8374 break;
8375 case 7:
8376 /* Always true. */
8377 return omit_one_operand (type, integer_one_node, arg0);
8378 }
8379
8380 if (save_p)
8381 return save_expr (build2 (code, type, cval1, cval2));
8382 return fold_build2 (code, type, cval1, cval2);
8383 }
8384 }
8385 }
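/* A worked instance: folding (a > b) == 0.  Substituting
   (max, min), (max, max) and (min, max) for (a, b) yields
   high_result = 0, equal_result = 1 and low_result = 1, so the
   mask is 0*4 + 1*2 + 1 = 3, which selects LE_EXPR: the whole
   expression folds to a <= b.  */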
8386
8387 /* Fold a comparison of the address of COMPONENT_REFs with the same
8388 type and component to a comparison of the address of the base
8389 object. In short, &x->a OP &y->a to x OP y and
8390 &x->a OP &y.a to x OP &y */
8391 if (TREE_CODE (arg0) == ADDR_EXPR
8392 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8393 && TREE_CODE (arg1) == ADDR_EXPR
8394 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8395 {
8396 tree cref0 = TREE_OPERAND (arg0, 0);
8397 tree cref1 = TREE_OPERAND (arg1, 0);
8398 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8399 {
8400 tree op0 = TREE_OPERAND (cref0, 0);
8401 tree op1 = TREE_OPERAND (cref1, 0);
8402 return fold_build2 (code, type,
8403 build_fold_addr_expr (op0),
8404 build_fold_addr_expr (op1));
8405 }
8406 }
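/* For example, &p->f OP &q->f (same FIELD_DECL f) folds to
   p OP q, and &p->f OP &s.f folds to p OP &s.  */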
8407
8408 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8409 into a single range test. */
8410 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8411 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8412 && TREE_CODE (arg1) == INTEGER_CST
8413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8414 && !integer_zerop (TREE_OPERAND (arg0, 1))
8415 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8416 && !TREE_OVERFLOW (arg1))
8417 {
8418 tem = fold_div_compare (code, type, arg0, arg1);
8419 if (tem != NULL_TREE)
8420 return tem;
8421 }
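/* For example, with signed int X, fold_div_compare can rewrite
     X / 4 == 2
   as the range test X >= 8 && X <= 11, since those are exactly the
   values that truncating division maps to 2.  */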
8422
8423 return NULL_TREE;
8424}
8425
8426
8427/* Subroutine of fold_binary. Optimize complex multiplications of the
8428 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8429 argument EXPR represents the expression "z" of type TYPE. */
8430
8431static tree
8432fold_mult_zconjz (tree type, tree expr)
8433{
8434 tree itype = TREE_TYPE (type);
8435 tree rpart, ipart, tem;
8436
8437 if (TREE_CODE (expr) == COMPLEX_EXPR)
8438 {
8439 rpart = TREE_OPERAND (expr, 0);
8440 ipart = TREE_OPERAND (expr, 1);
8441 }
8442 else if (TREE_CODE (expr) == COMPLEX_CST)
8443 {
8444 rpart = TREE_REALPART (expr);
8445 ipart = TREE_IMAGPART (expr);
8446 }
8447 else
8448 {
8449 expr = save_expr (expr);
8450 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8451 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8452 }
8453
8454 rpart = save_expr (rpart);
8455 ipart = save_expr (ipart);
8456 tem = fold_build2 (PLUS_EXPR, itype,
8457 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8458 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8459 return fold_build2 (COMPLEX_EXPR, type, tem,
8460 fold_convert (itype, integer_zero_node));
8461}
8462
8463
8464/* Fold a binary expression of code CODE and type TYPE with operands
8465 OP0 and OP1. Return the folded expression if folding is
8466 successful. Otherwise, return NULL_TREE. */
8467
8468tree
8469fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8470{
8471 enum tree_code_class kind = TREE_CODE_CLASS (code);
8472 tree arg0, arg1, tem;
8473 tree t1 = NULL_TREE;
8474 bool strict_overflow_p;
8475
8476 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8477 && TREE_CODE_LENGTH (code) == 2
8478 && op0 != NULL_TREE
8479 && op1 != NULL_TREE);
8480
8481 arg0 = op0;
8482 arg1 = op1;
8483
8484 /* Strip any conversions that don't change the mode. This is
8485 safe for every expression, except for a comparison expression
8486 because its signedness is derived from its operands. So, in
8487 the latter case, only strip conversions that don't change the
8488 signedness.
8489
8490 Note that this is done as an internal manipulation within the
8491 constant folder, in order to find the simplest representation
8492 of the arguments so that their form can be studied. In any
8493 case, the appropriate type conversions should be put back in
8494 the tree that will get out of the constant folder. */
8495
8496 if (kind == tcc_comparison)
8497 {
8498 STRIP_SIGN_NOPS (arg0);
8499 STRIP_SIGN_NOPS (arg1);
8500 }
8501 else
8502 {
8503 STRIP_NOPS (arg0);
8504 STRIP_NOPS (arg1);
8505 }
8506
8507 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8508 constant but we can't do arithmetic on them. */
8509 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8510 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8511 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8512 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8513 {
8514 if (kind == tcc_binary)
8515 tem = const_binop (code, arg0, arg1, 0);
8516 else if (kind == tcc_comparison)
8517 tem = fold_relational_const (code, type, arg0, arg1);
8518 else
8519 tem = NULL_TREE;
8520
8521 if (tem != NULL_TREE)
8522 {
8523 if (TREE_TYPE (tem) != type)
8524 tem = fold_convert (type, tem);
8525 return tem;
8526 }
8527 }
8528
8529 /* If this is a commutative operation, and ARG0 is a constant, move it
8530 to ARG1 to reduce the number of tests below. */
8531 if (commutative_tree_code (code)
8532 && tree_swap_operands_p (arg0, arg1, true))
8533 return fold_build2 (code, type, op1, op0);
8534
8535 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8536
8537 First check for cases where an arithmetic operation is applied to a
8538 compound, conditional, or comparison operation. Push the arithmetic
8539 operation inside the compound or conditional to see if any folding
8540 can then be done. Convert comparison to conditional for this purpose.
8541 This also optimizes non-constant cases that used to be done in
8542 expand_expr.
8543
8544 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8545 one of the operands is a comparison and the other is a comparison, a
8546 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8547 code below would make the expression more complex. Change it to a
8548 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8549 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8550
8551 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8552 || code == EQ_EXPR || code == NE_EXPR)
8553 && ((truth_value_p (TREE_CODE (arg0))
8554 && (truth_value_p (TREE_CODE (arg1))
8555 || (TREE_CODE (arg1) == BIT_AND_EXPR
8556 && integer_onep (TREE_OPERAND (arg1, 1)))))
8557 || (truth_value_p (TREE_CODE (arg1))
8558 && (truth_value_p (TREE_CODE (arg0))
8559 || (TREE_CODE (arg0) == BIT_AND_EXPR
8560 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8561 {
8562 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8563 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8564 : TRUTH_XOR_EXPR,
8565 boolean_type_node,
8566 fold_convert (boolean_type_node, arg0),
8567 fold_convert (boolean_type_node, arg1));
8568
8569 if (code == EQ_EXPR)
8570 tem = invert_truthvalue (tem);
8571
8572 return fold_convert (type, tem);
8573 }
8574
8575 if (TREE_CODE_CLASS (code) == tcc_binary
8576 || TREE_CODE_CLASS (code) == tcc_comparison)
8577 {
8578 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8579 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8580 fold_build2 (code, type,
8581 TREE_OPERAND (arg0, 1), op1));
8582 if (TREE_CODE (arg1) == COMPOUND_EXPR
8583 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8584 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8585 fold_build2 (code, type,
8586 op0, TREE_OPERAND (arg1, 1)));
8587
8588 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8589 {
8590 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8591 arg0, arg1,
8592 /*cond_first_p=*/1);
8593 if (tem != NULL_TREE)
8594 return tem;
8595 }
8596
8597 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8598 {
8599 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8600 arg1, arg0,
8601 /*cond_first_p=*/0);
8602 if (tem != NULL_TREE)
8603 return tem;
8604 }
8605 }
8606
8607 switch (code)
8608 {
8609 case PLUS_EXPR:
8610 /* A + (-B) -> A - B */
8611 if (TREE_CODE (arg1) == NEGATE_EXPR)
8612 return fold_build2 (MINUS_EXPR, type,
8613 fold_convert (type, arg0),
8614 fold_convert (type, TREE_OPERAND (arg1, 0)));
8615 /* (-A) + B -> B - A */
8616 if (TREE_CODE (arg0) == NEGATE_EXPR
8617 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8618 return fold_build2 (MINUS_EXPR, type,
8619 fold_convert (type, arg1),
8620 fold_convert (type, TREE_OPERAND (arg0, 0)));
8621 /* Convert ~A + 1 to -A. */
8622 if (INTEGRAL_TYPE_P (type)
8623 && TREE_CODE (arg0) == BIT_NOT_EXPR
8624 && integer_onep (arg1))
8625 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8626
8627 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8628 same or one. */
8629 if ((TREE_CODE (arg0) == MULT_EXPR
8630 || TREE_CODE (arg1) == MULT_EXPR)
8631 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8632 {
8633 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8634 if (tem)
8635 return tem;
8636 }
8637
8638 if (! FLOAT_TYPE_P (type))
8639 {
8640 if (integer_zerop (arg1))
8641 return non_lvalue (fold_convert (type, arg0));
8642
8643 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8644 with a constant, and the two constants have no bits in common,
8645 we should treat this as a BIT_IOR_EXPR since this may produce more
8646 simplifications. */
8647 if (TREE_CODE (arg0) == BIT_AND_EXPR
8648 && TREE_CODE (arg1) == BIT_AND_EXPR
8649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8650 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8651 && integer_zerop (const_binop (BIT_AND_EXPR,
8652 TREE_OPERAND (arg0, 1),
8653 TREE_OPERAND (arg1, 1), 0)))
8654 {
8655 code = BIT_IOR_EXPR;
8656 goto bit_ior;
8657 }
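/* For example, (X & 0xF0) + (Y & 0x0F): the masks share no bits,
   so no carry can propagate between the two terms and the addition
   is handled as (X & 0xF0) | (Y & 0x0F).  */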
8658
8659 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8660 (plus (plus (mult) (mult)) (foo)) so that we can
8661 take advantage of the factoring cases below. */
8662 if (((TREE_CODE (arg0) == PLUS_EXPR
8663 || TREE_CODE (arg0) == MINUS_EXPR)
8664 && TREE_CODE (arg1) == MULT_EXPR)
8665 || ((TREE_CODE (arg1) == PLUS_EXPR
8666 || TREE_CODE (arg1) == MINUS_EXPR)
8667 && TREE_CODE (arg0) == MULT_EXPR))
8668 {
8669 tree parg0, parg1, parg, marg;
8670 enum tree_code pcode;
8671
8672 if (TREE_CODE (arg1) == MULT_EXPR)
8673 parg = arg0, marg = arg1;
8674 else
8675 parg = arg1, marg = arg0;
8676 pcode = TREE_CODE (parg);
8677 parg0 = TREE_OPERAND (parg, 0);
8678 parg1 = TREE_OPERAND (parg, 1);
8679 STRIP_NOPS (parg0);
8680 STRIP_NOPS (parg1);
8681
8682 if (TREE_CODE (parg0) == MULT_EXPR
8683 && TREE_CODE (parg1) != MULT_EXPR)
8684 return fold_build2 (pcode, type,
8685 fold_build2 (PLUS_EXPR, type,
8686 fold_convert (type, parg0),
8687 fold_convert (type, marg)),
8688 fold_convert (type, parg1));
8689 if (TREE_CODE (parg0) != MULT_EXPR
8690 && TREE_CODE (parg1) == MULT_EXPR)
8691 return fold_build2 (PLUS_EXPR, type,
8692 fold_convert (type, parg0),
8693 fold_build2 (pcode, type,
8694 fold_convert (type, marg),
8695 fold_convert (type,
8696 parg1)));
8697 }
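/* For example, (a*b + c) + d*e is rewritten as (a*b + d*e) + c,
   pairing the two MULT_EXPRs for the factoring cases below.  */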
8698
8699 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8700 of the array. The loop optimizer sometimes produces this type of
8701 expression. */
8702 if (TREE_CODE (arg0) == ADDR_EXPR)
8703 {
8704 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8705 if (tem)
8706 return fold_convert (type, tem);
8707 }
8708 else if (TREE_CODE (arg1) == ADDR_EXPR)
8709 {
8710 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8711 if (tem)
8712 return fold_convert (type, tem);
8713 }
8714 }
8715 else
8716 {
8717 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8718 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8719 return non_lvalue (fold_convert (type, arg0));
8720
8721 /* Likewise if the operands are reversed. */
8722 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8723 return non_lvalue (fold_convert (type, arg1));
8724
8725 /* Convert X + -C into X - C. */
8726 if (TREE_CODE (arg1) == REAL_CST
8727 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8728 {
8729 tem = fold_negate_const (arg1, type);
8730 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8731 return fold_build2 (MINUS_EXPR, type,
8732 fold_convert (type, arg0),
8733 fold_convert (type, tem));
8734 }
8735
8736 if (flag_unsafe_math_optimizations
8737 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8738 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8739 && (tem = distribute_real_division (code, type, arg0, arg1)))
8740 return tem;
8741
8742 /* Convert x+x into x*2.0. */
8743 if (operand_equal_p (arg0, arg1, 0)
8744 && SCALAR_FLOAT_TYPE_P (type))
8745 return fold_build2 (MULT_EXPR, type, arg0,
8746 build_real (type, dconst2));
8747
8748 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8749 if (flag_unsafe_math_optimizations
8750 && TREE_CODE (arg1) == PLUS_EXPR
8751 && TREE_CODE (arg0) != MULT_EXPR)
8752 {
8753 tree tree10 = TREE_OPERAND (arg1, 0);
8754 tree tree11 = TREE_OPERAND (arg1, 1);
8755 if (TREE_CODE (tree11) == MULT_EXPR
8756 && TREE_CODE (tree10) == MULT_EXPR)
8757 {
8758 tree tree0;
8759 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8760 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8761 }
8762 }
8763 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8764 if (flag_unsafe_math_optimizations
8765 && TREE_CODE (arg0) == PLUS_EXPR
8766 && TREE_CODE (arg1) != MULT_EXPR)
8767 {
8768 tree tree00 = TREE_OPERAND (arg0, 0);
8769 tree tree01 = TREE_OPERAND (arg0, 1);
8770 if (TREE_CODE (tree01) == MULT_EXPR
8771 && TREE_CODE (tree00) == MULT_EXPR)
8772 {
8773 tree tree0;
8774 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8775 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8776 }
8777 }
8778 }
8779
8780 bit_rotate:
8781 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8782 is a rotate of A by C1 bits. */
8783 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8784 is a rotate of A by B bits. */
8785 {
8786 enum tree_code code0, code1;
8787 code0 = TREE_CODE (arg0);
8788 code1 = TREE_CODE (arg1);
8789 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8790 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8791 && operand_equal_p (TREE_OPERAND (arg0, 0),
8792 TREE_OPERAND (arg1, 0), 0)
8793 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8794 {
8795 tree tree01, tree11;
8796 enum tree_code code01, code11;
8797
8798 tree01 = TREE_OPERAND (arg0, 1);
8799 tree11 = TREE_OPERAND (arg1, 1);
8800 STRIP_NOPS (tree01);
8801 STRIP_NOPS (tree11);
8802 code01 = TREE_CODE (tree01);
8803 code11 = TREE_CODE (tree11);
8804 if (code01 == INTEGER_CST
8805 && code11 == INTEGER_CST
8806 && TREE_INT_CST_HIGH (tree01) == 0
8807 && TREE_INT_CST_HIGH (tree11) == 0
8808 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8809 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8810 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8811 code0 == LSHIFT_EXPR ? tree01 : tree11);
8812 else if (code11 == MINUS_EXPR)
8813 {
8814 tree tree110, tree111;
8815 tree110 = TREE_OPERAND (tree11, 0);
8816 tree111 = TREE_OPERAND (tree11, 1);
8817 STRIP_NOPS (tree110);
8818 STRIP_NOPS (tree111);
8819 if (TREE_CODE (tree110) == INTEGER_CST
8820 && 0 == compare_tree_int (tree110,
8821 TYPE_PRECISION
8822 (TREE_TYPE (TREE_OPERAND
8823 (arg0, 0))))
8824 && operand_equal_p (tree01, tree111, 0))
8825 return build2 ((code0 == LSHIFT_EXPR
8826 ? LROTATE_EXPR
8827 : RROTATE_EXPR),
8828 type, TREE_OPERAND (arg0, 0), tree01);
8829 }
8830 else if (code01 == MINUS_EXPR)
8831 {
8832 tree tree010, tree011;
8833 tree010 = TREE_OPERAND (tree01, 0);
8834 tree011 = TREE_OPERAND (tree01, 1);
8835 STRIP_NOPS (tree010);
8836 STRIP_NOPS (tree011);
8837 if (TREE_CODE (tree010) == INTEGER_CST
8838 && 0 == compare_tree_int (tree010,
8839 TYPE_PRECISION
8840 (TREE_TYPE (TREE_OPERAND
8841 (arg0, 0))))
8842 && operand_equal_p (tree11, tree011, 0))
8843 return build2 ((code0 != LSHIFT_EXPR
8844 ? LROTATE_EXPR
8845 : RROTATE_EXPR),
8846 type, TREE_OPERAND (arg0, 0), tree11);
8847 }
8848 }
8849 }
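/* Illustrative cases, assuming a 32-bit unsigned A:
     (A << 3) + (A >> 29)        ->  A rotated left by 3
     (A << B) + (A >> (32 - B))  ->  A rotated left by B
   The BIT_IOR_EXPR case jumps here via goto bit_rotate as well, so
   the same patterns written with | fold the same way.  */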
8850
8851 associate:
8852 /* In most languages, we cannot reassociate operations on floats across
8853 parentheses. Rather than remember where the parentheses were, we
8854 don't associate floats at all, unless the user has specified
8855 -funsafe-math-optimizations. */
8856
8857 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8858 {
8859 tree var0, con0, lit0, minus_lit0;
8860 tree var1, con1, lit1, minus_lit1;
8861 bool ok = true;
8862
8863 /* Split both trees into variables, constants, and literals. Then
8864 associate each group together, the constants with literals,
8865 then the result with variables. This increases the chances of
8866 literals being recombined later and of generating relocatable
8867 expressions for the sum of a constant and literal. */
8868 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8869 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8870 code == MINUS_EXPR);
8871
8872 /* With undefined overflow we can only associate constants
8873 with one variable. */
8874 if ((POINTER_TYPE_P (type)
8875 || (INTEGRAL_TYPE_P (type)
8876 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8877 && var0 && var1)
8878 {
8879 tree tmp0 = var0;
8880 tree tmp1 = var1;
8881
8882 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8883 tmp0 = TREE_OPERAND (tmp0, 0);
8884 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8885 tmp1 = TREE_OPERAND (tmp1, 0);
8886 /* The only case we can still associate with two variables
8887 is if they are the same, modulo negation. */
8888 if (!operand_equal_p (tmp0, tmp1, 0))
8889 ok = false;
8890 }
8891
8892 /* Only do something if we found more than two objects. Otherwise,
8893 nothing has changed and we risk infinite recursion. */
8894 if (ok
8895 && (2 < ((var0 != 0) + (var1 != 0)
8896 + (con0 != 0) + (con1 != 0)
8897 + (lit0 != 0) + (lit1 != 0)
8898 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8899 {
8900 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8901 if (code == MINUS_EXPR)
8902 code = PLUS_EXPR;
8903
8904 var0 = associate_trees (var0, var1, code, type);
8905 con0 = associate_trees (con0, con1, code, type);
8906 lit0 = associate_trees (lit0, lit1, code, type);
8907 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8908
8909 /* Preserve the MINUS_EXPR if the negative part of the literal is
8910 greater than the positive part. Otherwise, the multiplicative
8911 folding code (i.e. extract_muldiv) may be fooled in case
8912 unsigned constants are subtracted, like in the following
8913 example: ((X*2 + 4) - 8U)/2. */
8914 if (minus_lit0 && lit0)
8915 {
8916 if (TREE_CODE (lit0) == INTEGER_CST
8917 && TREE_CODE (minus_lit0) == INTEGER_CST
8918 && tree_int_cst_lt (lit0, minus_lit0))
8919 {
8920 minus_lit0 = associate_trees (minus_lit0, lit0,
8921 MINUS_EXPR, type);
8922 lit0 = 0;
8923 }
8924 else
8925 {
8926 lit0 = associate_trees (lit0, minus_lit0,
8927 MINUS_EXPR, type);
8928 minus_lit0 = 0;
8929 }
8930 }
8931 if (minus_lit0)
8932 {
8933 if (con0 == 0)
8934 return fold_convert (type,
8935 associate_trees (var0, minus_lit0,
8936 MINUS_EXPR, type));
8937 else
8938 {
8939 con0 = associate_trees (con0, minus_lit0,
8940 MINUS_EXPR, type);
8941 return fold_convert (type,
8942 associate_trees (var0, con0,
8943 PLUS_EXPR, type));
8944 }
8945 }
8946
8947 con0 = associate_trees (con0, lit0, code, type);
8948 return fold_convert (type, associate_trees (var0, con0,
8949 code, type));
8950 }
8951 }
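/* For example, with unsigned X and Y, (X + 3) + (Y + 5) splits into
   variables X, Y and literals 3, 5 and is recombined as
   (X + Y) + 8.  With undefined signed overflow, the check above
   allows this only when the variable parts are the same variable,
   possibly negated.  */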
8952
8953 return NULL_TREE;
8954
8955 case MINUS_EXPR:
8956 /* A - (-B) -> A + B */
8957 if (TREE_CODE (arg1) == NEGATE_EXPR)
8958 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8959 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8960 if (TREE_CODE (arg0) == NEGATE_EXPR
8961 && (FLOAT_TYPE_P (type)
8962 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8963 && negate_expr_p (arg1)
8964 && reorder_operands_p (arg0, arg1))
8965 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8966 TREE_OPERAND (arg0, 0));
8967 /* Convert -A - 1 to ~A. */
8968 if (INTEGRAL_TYPE_P (type)
8969 && TREE_CODE (arg0) == NEGATE_EXPR
8970 && integer_onep (arg1))
8971 return fold_build1 (BIT_NOT_EXPR, type,
8972 fold_convert (type, TREE_OPERAND (arg0, 0)));
8973
8974 /* Convert -1 - A to ~A. */
8975 if (INTEGRAL_TYPE_P (type)
8976 && integer_all_onesp (arg0))
8977 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8978
8979 if (! FLOAT_TYPE_P (type))
8980 {
8981 if (integer_zerop (arg0))
8982 return negate_expr (fold_convert (type, arg1));
8983 if (integer_zerop (arg1))
8984 return non_lvalue (fold_convert (type, arg0));
8985
8986 /* Fold A - (A & B) into ~B & A. */
8987 if (!TREE_SIDE_EFFECTS (arg0)
8988 && TREE_CODE (arg1) == BIT_AND_EXPR)
8989 {
8990 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8991 return fold_build2 (BIT_AND_EXPR, type,
8992 fold_build1 (BIT_NOT_EXPR, type,
8993 TREE_OPERAND (arg1, 0)),
8994 arg0);
8995 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8996 return fold_build2 (BIT_AND_EXPR, type,
8997 fold_build1 (BIT_NOT_EXPR, type,
8998 TREE_OPERAND (arg1, 1)),
8999 arg0);
9000 }
9001
9002 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9003 any power of 2 minus 1. */
9004 if (TREE_CODE (arg0) == BIT_AND_EXPR
9005 && TREE_CODE (arg1) == BIT_AND_EXPR
9006 && operand_equal_p (TREE_OPERAND (arg0, 0),
9007 TREE_OPERAND (arg1, 0), 0))
9008 {
9009 tree mask0 = TREE_OPERAND (arg0, 1);
9010 tree mask1 = TREE_OPERAND (arg1, 1);
9011 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9012
9013 if (operand_equal_p (tem, mask1, 0))
9014 {
9015 tem = fold_build2 (BIT_XOR_EXPR, type,
9016 TREE_OPERAND (arg0, 0), mask1);
9017 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9018 }
9019 }
9020 }
9021
9022 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9023 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9024 return non_lvalue (fold_convert (type, arg0));
9025
9026 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9027 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9028 (-ARG1 + ARG0) reduces to -ARG1. */
9029 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9030 return negate_expr (fold_convert (type, arg1));
9031
9032 /* Fold &x - &x. This can happen from &x.foo - &x.
9033 This is unsafe for certain floats even in non-IEEE formats.
9034 In IEEE, it is unsafe because it does wrong for NaNs.
9035 Also note that operand_equal_p is always false if an operand
9036 is volatile. */
9037
9038 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9039 && operand_equal_p (arg0, arg1, 0))
9040 return fold_convert (type, integer_zero_node);
9041
9042 /* A - B -> A + (-B) if B is easily negatable. */
9043 if (negate_expr_p (arg1)
9044 && ((FLOAT_TYPE_P (type)
9045 /* Avoid this transformation if B is a positive REAL_CST. */
9046 && (TREE_CODE (arg1) != REAL_CST
9047 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9048 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9049 return fold_build2 (PLUS_EXPR, type,
9050 fold_convert (type, arg0),
9051 fold_convert (type, negate_expr (arg1)));
9052
9053 /* Try folding difference of addresses. */
9054 {
9055 HOST_WIDE_INT diff;
9056
9057 if ((TREE_CODE (arg0) == ADDR_EXPR
9058 || TREE_CODE (arg1) == ADDR_EXPR)
9059 && ptr_difference_const (arg0, arg1, &diff))
9060 return build_int_cst_type (type, diff);
9061 }
9062
9063 /* Fold &a[i] - &a[j] to i-j. */
9064 if (TREE_CODE (arg0) == ADDR_EXPR
9065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9066 && TREE_CODE (arg1) == ADDR_EXPR
9067 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9068 {
9069 tree aref0 = TREE_OPERAND (arg0, 0);
9070 tree aref1 = TREE_OPERAND (arg1, 0);
9071 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9072 TREE_OPERAND (aref1, 0), 0))
9073 {
9074 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9075 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9076 tree esz = array_ref_element_size (aref0);
9077 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9078 return fold_build2 (MULT_EXPR, type, diff,
9079 fold_convert (type, esz));
9080
9081 }
9082 }
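/* For example, the MINUS_EXPR of &a[i] and &a[j] folds to
   (i - j) * element_size, with both the indices and the element
   size converted to the result type.  */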
9083
9084 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9085 of the array. The loop optimizer sometimes produces this type of
9086 expression. */
9087 if (TREE_CODE (arg0) == ADDR_EXPR)
9088 {
9089 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9090 if (tem)
9091 return fold_convert (type, tem);
9092 }
9093
9094 if (flag_unsafe_math_optimizations
9095 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9096 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9097 && (tem = distribute_real_division (code, type, arg0, arg1)))
9098 return tem;
9099
9100 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9101 same or one. */
9102 if ((TREE_CODE (arg0) == MULT_EXPR
9103 || TREE_CODE (arg1) == MULT_EXPR)
9104 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9105 {
9106 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9107 if (tem)
9108 return tem;
9109 }
9110
9111 goto associate;
9112
9113 case MULT_EXPR:
9114 /* (-A) * (-B) -> A * B */
9115 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9116 return fold_build2 (MULT_EXPR, type,
9117 fold_convert (type, TREE_OPERAND (arg0, 0)),
9118 fold_convert (type, negate_expr (arg1)));
9119 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9120 return fold_build2 (MULT_EXPR, type,
9121 fold_convert (type, negate_expr (arg0)),
9122 fold_convert (type, TREE_OPERAND (arg1, 0)));
9123
9124 if (! FLOAT_TYPE_P (type))
9125 {
9126 if (integer_zerop (arg1))
9127 return omit_one_operand (type, arg1, arg0);
9128 if (integer_onep (arg1))
9129 return non_lvalue (fold_convert (type, arg0));
9130 /* Transform x * -1 into -x. */
9131 if (integer_all_onesp (arg1))
9132 return fold_convert (type, negate_expr (arg0));
9133
9134 /* (a * (1 << b)) is (a << b) */
9135 if (TREE_CODE (arg1) == LSHIFT_EXPR
9136 && integer_onep (TREE_OPERAND (arg1, 0)))
9137 return fold_build2 (LSHIFT_EXPR, type, arg0,
9138 TREE_OPERAND (arg1, 1));
9139 if (TREE_CODE (arg0) == LSHIFT_EXPR
9140 && integer_onep (TREE_OPERAND (arg0, 0)))
9141 return fold_build2 (LSHIFT_EXPR, type, arg1,
9142 TREE_OPERAND (arg0, 1));
9143
9144 strict_overflow_p = false;
9145 if (TREE_CODE (arg1) == INTEGER_CST
9146 && 0 != (tem = extract_muldiv (op0,
9147 fold_convert (type, arg1),
9148 code, NULL_TREE,
9149 &strict_overflow_p)))
9150 {
9151 if (strict_overflow_p)
9152 fold_overflow_warning (("assuming signed overflow does not "
9153 "occur when simplifying "
9154 "multiplication"),
9155 WARN_STRICT_OVERFLOW_MISC);
9156 return fold_convert (type, tem);
9157 }
9158
9159 /* Optimize z * conj(z) for integer complex numbers. */
9160 if (TREE_CODE (arg0) == CONJ_EXPR
9161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9162 return fold_mult_zconjz (type, arg1);
9163 if (TREE_CODE (arg1) == CONJ_EXPR
9164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9165 return fold_mult_zconjz (type, arg0);
9166 }
9167 else
9168 {
9169 /* Maybe fold x * 0 to 0. The expressions aren't the same
9170 when x is NaN, since x * 0 is also NaN. Nor are they the
9171 same in modes with signed zeros, since multiplying a
9172 negative value by 0 gives -0, not +0. */
9173 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9174 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9175 && real_zerop (arg1))
9176 return omit_one_operand (type, arg1, arg0);
9177 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9178 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9179 && real_onep (arg1))
9180 return non_lvalue (fold_convert (type, arg0));
9181
9182 /* Transform x * -1.0 into -x. */
9183 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9184 && real_minus_onep (arg1))
9185 return fold_convert (type, negate_expr (arg0));
9186
9187 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9188 if (flag_unsafe_math_optimizations
9189 && TREE_CODE (arg0) == RDIV_EXPR
9190 && TREE_CODE (arg1) == REAL_CST
9191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9192 {
9193 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9194 arg1, 0);
9195 if (tem)
9196 return fold_build2 (RDIV_EXPR, type, tem,
9197 TREE_OPERAND (arg0, 1));
9198 }
9199
9200 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9201 if (operand_equal_p (arg0, arg1, 0))
9202 {
9203 tree tem = fold_strip_sign_ops (arg0);
9204 if (tem != NULL_TREE)
9205 {
9206 tem = fold_convert (type, tem);
9207 return fold_build2 (MULT_EXPR, type, tem, tem);
9208 }
9209 }
9210
9211 /* Optimize z * conj(z) for floating point complex numbers.
9212 Guarded by flag_unsafe_math_optimizations as non-finite
9213 imaginary components don't produce scalar results. */
9214 if (flag_unsafe_math_optimizations
9215 && TREE_CODE (arg0) == CONJ_EXPR
9216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9217 return fold_mult_zconjz (type, arg1);
9218 if (flag_unsafe_math_optimizations
9219 && TREE_CODE (arg1) == CONJ_EXPR
9220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9221 return fold_mult_zconjz (type, arg0);
9222
9223 if (flag_unsafe_math_optimizations)
9224 {
9225 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9226 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9227
9228 /* Optimizations of root(...)*root(...). */
9229 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9230 {
9231 tree rootfn, arg, arglist;
9232 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9233 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9234
9235 /* Optimize sqrt(x)*sqrt(x) as x. */
9236 if (BUILTIN_SQRT_P (fcode0)
9237 && operand_equal_p (arg00, arg10, 0)
9238 && ! HONOR_SNANS (TYPE_MODE (type)))
9239 return arg00;
9240
9241 /* Optimize root(x)*root(y) as root(x*y). */
9242 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9243 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9244 arglist = build_tree_list (NULL_TREE, arg);
9245 return build_function_call_expr (rootfn, arglist);
9246 }
9247
9248 /* Optimize expN(x)*expN(y) as expN(x+y). */
9249 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9250 {
9251 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9252 tree arg = fold_build2 (PLUS_EXPR, type,
9253 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9254 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9255 tree arglist = build_tree_list (NULL_TREE, arg);
9256 return build_function_call_expr (expfn, arglist);
9257 }
9258
9259 /* Optimizations of pow(...)*pow(...). */
9260 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9261 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9262 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9263 {
9264 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9265 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9266 1)));
9267 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9268 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9269 1)));
9270
9271 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9272 if (operand_equal_p (arg01, arg11, 0))
9273 {
9274 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9275 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9276 tree arglist = tree_cons (NULL_TREE, arg,
9277 build_tree_list (NULL_TREE,
9278 arg01));
9279 return build_function_call_expr (powfn, arglist);
9280 }
9281
9282 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9283 if (operand_equal_p (arg00, arg10, 0))
9284 {
9285 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9286 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9287 tree arglist = tree_cons (NULL_TREE, arg00,
9288 build_tree_list (NULL_TREE,
9289 arg));
9290 return build_function_call_expr (powfn, arglist);
9291 }
9292 }
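/* Concretely, pow (x, y) * pow (z, y) becomes pow (x * z, y), and
   pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */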
9293
9294 /* Optimize tan(x)*cos(x) as sin(x). */
9295 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9296 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9297 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9298 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9299 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9300 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9301 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9302 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9303 {
9304 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9305
9306 if (sinfn != NULL_TREE)
9307 return build_function_call_expr (sinfn,
9308 TREE_OPERAND (arg0, 1));
9309 }
9310
9311 /* Optimize x*pow(x,c) as pow(x,c+1). */
9312 if (fcode1 == BUILT_IN_POW
9313 || fcode1 == BUILT_IN_POWF
9314 || fcode1 == BUILT_IN_POWL)
9315 {
9316 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9317 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9318 1)));
9319 if (TREE_CODE (arg11) == REAL_CST
9320 && ! TREE_CONSTANT_OVERFLOW (arg11)
9321 && operand_equal_p (arg0, arg10, 0))
9322 {
9323 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9324 REAL_VALUE_TYPE c;
9325 tree arg, arglist;
9326
9327 c = TREE_REAL_CST (arg11);
9328 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9329 arg = build_real (type, c);
9330 arglist = build_tree_list (NULL_TREE, arg);
9331 arglist = tree_cons (NULL_TREE, arg0, arglist);
9332 return build_function_call_expr (powfn, arglist);
9333 }
9334 }
9335
9336 /* Optimize pow(x,c)*x as pow(x,c+1). */
9337 if (fcode0 == BUILT_IN_POW
9338 || fcode0 == BUILT_IN_POWF
9339 || fcode0 == BUILT_IN_POWL)
9340 {
9341 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9342 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9343 1)));
9344 if (TREE_CODE (arg01) == REAL_CST
9345 && ! TREE_CONSTANT_OVERFLOW (arg01)
9346 && operand_equal_p (arg1, arg00, 0))
9347 {
9348 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9349 REAL_VALUE_TYPE c;
9350 tree arg, arglist;
9351
9352 c = TREE_REAL_CST (arg01);
9353 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9354 arg = build_real (type, c);
9355 arglist = build_tree_list (NULL_TREE, arg);
9356 arglist = tree_cons (NULL_TREE, arg1, arglist);
9357 return build_function_call_expr (powfn, arglist);
9358 }
9359 }
9360
9361 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9362 if (! optimize_size
9363 && operand_equal_p (arg0, arg1, 0))
9364 {
9365 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9366
9367 if (powfn)
9368 {
9369 tree arg = build_real (type, dconst2);
9370 tree arglist = build_tree_list (NULL_TREE, arg);
9371 arglist = tree_cons (NULL_TREE, arg0, arglist);
9372 return build_function_call_expr (powfn, arglist);
9373 }
9374 }
9375 }
9376 }
9377 goto associate;
9378
9379 case BIT_IOR_EXPR:
9380 bit_ior:
9381 if (integer_all_onesp (arg1))
9382 return omit_one_operand (type, arg1, arg0);
9383 if (integer_zerop (arg1))
9384 return non_lvalue (fold_convert (type, arg0));
9385 if (operand_equal_p (arg0, arg1, 0))
9386 return non_lvalue (fold_convert (type, arg0));
9387
/* ~X | X is -1.  */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
    && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
    && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
  {
    t1 = build_int_cst (type, -1);
    t1 = force_fit_type (t1, 0, false, false);
    return omit_one_operand (type, t1, arg1);
  }

/* X | ~X is -1.  */
if (TREE_CODE (arg1) == BIT_NOT_EXPR
    && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
    && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
  {
    t1 = build_int_cst (type, -1);
    t1 = force_fit_type (t1, 0, false, false);
    return omit_one_operand (type, t1, arg0);
  }
9405
9406 /* Canonicalize (X & C1) | C2. */
9407 if (TREE_CODE (arg0) == BIT_AND_EXPR
9408 && TREE_CODE (arg1) == INTEGER_CST
9409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9410 {
9411 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9412 int width = TYPE_PRECISION (type);
9413 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9414 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9415 hi2 = TREE_INT_CST_HIGH (arg1);
9416 lo2 = TREE_INT_CST_LOW (arg1);
9417
9418 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9419 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9420 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9421
9422 if (width > HOST_BITS_PER_WIDE_INT)
9423 {
9424 mhi = (unsigned HOST_WIDE_INT) -1
9425 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9426 mlo = -1;
9427 }
9428 else
9429 {
9430 mhi = 0;
9431 mlo = (unsigned HOST_WIDE_INT) -1
9432 >> (HOST_BITS_PER_WIDE_INT - width);
9433 }
9434
9435 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9436 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9437 return fold_build2 (BIT_IOR_EXPR, type,
9438 TREE_OPERAND (arg0, 0), arg1);
9439
9440 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9441 hi1 &= mhi;
9442 lo1 &= mlo;
9443 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9444 return fold_build2 (BIT_IOR_EXPR, type,
9445 fold_build2 (BIT_AND_EXPR, type,
9446 TREE_OPERAND (arg0, 0),
9447 build_int_cst_wide (type,
9448 lo1 & ~lo2,
9449 hi1 & ~hi2)),
9450 arg1);
9451 }
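/* Worked instances, assuming a 32-bit type:
     (X & 0x0F) | 0x1F        ->  0x1F                (C1 & C2) == C1
     (X & 0xFFFFFF00) | 0xFF  ->  X | 0xFF            (C1 | C2) == ~0
     (X & 0x3C) | 0x0F        ->  (X & 0x30) | 0x0F   C1 &= ~C2  */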
9452
9453 /* (X & Y) | Y is (X, Y). */
9454 if (TREE_CODE (arg0) == BIT_AND_EXPR
9455 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9456 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9457 /* (X & Y) | X is (Y, X). */
9458 if (TREE_CODE (arg0) == BIT_AND_EXPR
9459 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9460 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9461 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9462 /* X | (X & Y) is (Y, X). */
9463 if (TREE_CODE (arg1) == BIT_AND_EXPR
9464 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9465 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9466 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9467 /* X | (Y & X) is (Y, X). */
9468 if (TREE_CODE (arg1) == BIT_AND_EXPR
9469 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9470 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9471 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9472
9473 t1 = distribute_bit_expr (code, type, arg0, arg1);
9474 if (t1 != NULL_TREE)
9475 return t1;
9476
9477 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9478
9479 This results in more efficient code for machines without a NAND
9480 instruction. Combine will canonicalize to the first form
9481 which will allow use of NAND instructions provided by the
9482 backend if they exist. */
9483 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9484 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9485 {
9486 return fold_build1 (BIT_NOT_EXPR, type,
9487 build2 (BIT_AND_EXPR, type,
9488 TREE_OPERAND (arg0, 0),
9489 TREE_OPERAND (arg1, 0)));
9490 }
9491
9492 /* See if this can be simplified into a rotate first. If that
9493 is unsuccessful continue in the association code. */
9494 goto bit_rotate;
9495
9498 case BIT_XOR_EXPR:
9499 if (integer_zerop (arg1))
9500 return non_lvalue (fold_convert (type, arg0));
9501 if (integer_all_onesp (arg1))
9502 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9503 if (operand_equal_p (arg0, arg1, 0))
9504 return omit_one_operand (type, integer_zero_node, arg0);
9505
9506 /* ~X ^ X is -1. */
9507 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9508 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9510 {
9511 t1 = build_int_cst (type, -1);
9512 t1 = force_fit_type (t1, 0, false, false);
9513 return omit_one_operand (type, t1, arg1);
9514 }
9515
9516 /* X ^ ~X is -1. */
9517 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9518 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9520 {
9521 t1 = build_int_cst (type, -1);
9522 t1 = force_fit_type (t1, 0, false, false);
9523 return omit_one_operand (type, t1, arg0);
9524 }
9521
9522 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9523 with a constant, and the two constants have no bits in common,
9524 we should treat this as a BIT_IOR_EXPR since this may produce more
9525 simplifications. */
9526 if (TREE_CODE (arg0) == BIT_AND_EXPR
9527 && TREE_CODE (arg1) == BIT_AND_EXPR
9528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9529 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9530 && integer_zerop (const_binop (BIT_AND_EXPR,
9531 TREE_OPERAND (arg0, 1),
9532 TREE_OPERAND (arg1, 1), 0)))
9533 {
9534 code = BIT_IOR_EXPR;
9535 goto bit_ior;
9536 }
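/* Worked example (editorial illustration, not from the original source):
   (X & 0x0f) ^ (Y & 0xf0) equals (X & 0x0f) | (Y & 0xf0) for all X
   and Y, because a ^ b == a | b whenever a & b == 0, and here the two
   masks 0x0f and 0xf0 share no bits. */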
9537
9538 /* (X | Y) ^ X -> Y & ~X. */
9539 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9540 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9541 {
9542 tree t2 = TREE_OPERAND (arg0, 1);
9543 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9544 arg1);
9545 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9546 fold_convert (type, t1));
9547 return t1;
9548 }
9549
9550 /* (Y | X) ^ X -> Y & ~X. */
9551 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9552 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9553 {
9554 tree t2 = TREE_OPERAND (arg0, 0);
9555 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9556 arg1);
9557 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9558 fold_convert (type, t1));
9559 return t1;
9560 }
9561
9562 /* X ^ (X | Y) -> Y & ~X. */
9563 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9564 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9565 {
9566 tree t2 = TREE_OPERAND (arg1, 1);
9567 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9568 arg0);
9569 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9570 fold_convert (type, t1));
9571 return t1;
9572 }
9573
9574 /* X ^ (Y | X) -> Y & ~X. */
9575 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9576 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9577 {
9578 tree t2 = TREE_OPERAND (arg1, 0);
9579 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9580 arg0);
9581 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9582 fold_convert (type, t1));
9583 return t1;
9584 }
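/* Worked example (editorial illustration, not from the original source):
   with X = 1100 and Y = 1010, (X | Y) ^ X = 1110 ^ 1100 = 0010 and
   Y & ~X = 1010 & 0011 = 0010. The XOR cancels every bit of X,
   including those of Y absorbed by the IOR, leaving exactly Y & ~X. */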
9585
9586 /* Convert ~X ^ ~Y to X ^ Y. */
9587 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9588 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9589 return fold_build2 (code, type,
9590 fold_convert (type, TREE_OPERAND (arg0, 0)),
9591 fold_convert (type, TREE_OPERAND (arg1, 0)));
9592
9593 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9594 if (TREE_CODE (arg0) == BIT_AND_EXPR
9595 && integer_onep (TREE_OPERAND (arg0, 1))
9596 && integer_onep (arg1))
9597 return fold_build2 (EQ_EXPR, type, arg0,
9598 build_int_cst (TREE_TYPE (arg0), 0));
9599
9600 /* Fold (X & Y) ^ Y as ~X & Y. */
9601 if (TREE_CODE (arg0) == BIT_AND_EXPR
9602 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9603 {
9604 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9605 return fold_build2 (BIT_AND_EXPR, type,
9606 fold_build1 (BIT_NOT_EXPR, type, tem),
9607 fold_convert (type, arg1));
9608 }
9609 /* Fold (X & Y) ^ X as ~Y & X. */
9610 if (TREE_CODE (arg0) == BIT_AND_EXPR
9611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9612 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9613 {
9614 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9615 return fold_build2 (BIT_AND_EXPR, type,
9616 fold_build1 (BIT_NOT_EXPR, type, tem),
9617 fold_convert (type, arg1));
9618 }
9619 /* Fold X ^ (X & Y) as X & ~Y. */
9620 if (TREE_CODE (arg1) == BIT_AND_EXPR
9621 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9622 {
9623 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9624 return fold_build2 (BIT_AND_EXPR, type,
9625 fold_convert (type, arg0),
9626 fold_build1 (BIT_NOT_EXPR, type, tem));
9627 }
9628 /* Fold X ^ (Y & X) as ~Y & X. */
9629 if (TREE_CODE (arg1) == BIT_AND_EXPR
9630 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9631 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9632 {
9633 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9634 return fold_build2 (BIT_AND_EXPR, type,
9635 fold_build1 (BIT_NOT_EXPR, type, tem),
9636 fold_convert (type, arg0));
9637 }
9638
9639 /* See if this can be simplified into a rotate first. If that
9640 is unsuccessful continue in the association code. */
9641 goto bit_rotate;
9642
9643 case BIT_AND_EXPR:
9644 if (integer_all_onesp (arg1))
9645 return non_lvalue (fold_convert (type, arg0));
9646 if (integer_zerop (arg1))
9647 return omit_one_operand (type, arg1, arg0);
9648 if (operand_equal_p (arg0, arg1, 0))
9649 return non_lvalue (fold_convert (type, arg0));
9650
9651 /* ~X & X is always zero. */
9652 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9654 return omit_one_operand (type, integer_zero_node, arg1);
9655
9656 /* X & ~X is always zero. */
9657 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9659 return omit_one_operand (type, integer_zero_node, arg0);
9660
9661 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9662 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9663 && TREE_CODE (arg1) == INTEGER_CST
9664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9665 return fold_build2 (BIT_IOR_EXPR, type,
9666 fold_build2 (BIT_AND_EXPR, type,
9667 TREE_OPERAND (arg0, 0), arg1),
9668 fold_build2 (BIT_AND_EXPR, type,
9669 TREE_OPERAND (arg0, 1), arg1));
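/* Worked example (editorial illustration, not from the original source):
   AND distributes over IOR. With X = 0101, C1 = 0011, C2 = 0110:
   (X | C1) & C2 = 0111 & 0110 = 0110, and
   (X & C2) | (C1 & C2) = 0100 | 0010 = 0110. */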
9670
9671 /* (X | Y) & Y is (X, Y). */
9672 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9673 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9674 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9675 /* (X | Y) & X is (Y, X). */
9676 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9677 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9678 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9679 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9680 /* X & (X | Y) is (Y, X). */
9681 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9683 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9684 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9685 /* X & (Y | X) is (Y, X). */
9686 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9687 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9688 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9689 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9690
9691 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9692 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9693 && integer_onep (TREE_OPERAND (arg0, 1))
9694 && integer_onep (arg1))
9695 {
9696 tem = TREE_OPERAND (arg0, 0);
9697 return fold_build2 (EQ_EXPR, type,
9698 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9699 build_int_cst (TREE_TYPE (tem), 1)),
9700 build_int_cst (TREE_TYPE (tem), 0));
9701 }
9702 /* Fold ~X & 1 as (X & 1) == 0. */
9703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9704 && integer_onep (arg1))
9705 {
9706 tem = TREE_OPERAND (arg0, 0);
9707 return fold_build2 (EQ_EXPR, type,
9708 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9709 build_int_cst (TREE_TYPE (tem), 1)),
9710 build_int_cst (TREE_TYPE (tem), 0));
9711 }
9712
9713 /* Fold (X ^ Y) & Y as ~X & Y. */
9714 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9715 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9716 {
9717 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9718 return fold_build2 (BIT_AND_EXPR, type,
9719 fold_build1 (BIT_NOT_EXPR, type, tem),
9720 fold_convert (type, arg1));
9721 }
9722 /* Fold (X ^ Y) & X as ~Y & X. */
9723 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9725 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9726 {
9727 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9728 return fold_build2 (BIT_AND_EXPR, type,
9729 fold_build1 (BIT_NOT_EXPR, type, tem),
9730 fold_convert (type, arg1));
9731 }
9732 /* Fold X & (X ^ Y) as X & ~Y. */
9733 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9735 {
9736 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9737 return fold_build2 (BIT_AND_EXPR, type,
9738 fold_convert (type, arg0),
9739 fold_build1 (BIT_NOT_EXPR, type, tem));
9740 }
9741 /* Fold X & (Y ^ X) as ~Y & X. */
9742 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9744 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9745 {
9746 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9747 return fold_build2 (BIT_AND_EXPR, type,
9748 fold_build1 (BIT_NOT_EXPR, type, tem),
9749 fold_convert (type, arg0));
9750 }
9751
9752 t1 = distribute_bit_expr (code, type, arg0, arg1);
9753 if (t1 != NULL_TREE)
9754 return t1;
9755 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9756 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9757 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9758 {
9759 unsigned int prec
9760 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9761
9762 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9763 && (~TREE_INT_CST_LOW (arg1)
9764 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9765 return fold_convert (type, TREE_OPERAND (arg0, 0));
9766 }
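/* Editorial note (not from the original source): if c is an unsigned
   char (precision 8), the widening conversion to int zero-extends, so
   every bit that can be set survives the mask 0377 (0xff) and the
   BIT_AND is redundant: ((int) c & 0377) is just (int) c. */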
9767
9768 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9769
9770 This results in more efficient code for machines without a NOR
9771 instruction. Combine will canonicalize to the first form
9772 which will allow use of NOR instructions provided by the
9773 backend if they exist. */
9774 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9775 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9776 {
9777 return fold_build1 (BIT_NOT_EXPR, type,
9778 build2 (BIT_IOR_EXPR, type,
9779 TREE_OPERAND (arg0, 0),
9780 TREE_OPERAND (arg1, 0)));
9781 }
9782
9783 goto associate;
9784
9785 case RDIV_EXPR:
9786 /* Don't touch a floating-point divide by zero unless the mode
9787 of the constant can represent infinity. */
9788 if (TREE_CODE (arg1) == REAL_CST
9789 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9790 && real_zerop (arg1))
9791 return NULL_TREE;
9792
9793 /* Optimize A / A to 1.0 if we don't care about
9794 NaNs or Infinities. Skip the transformation
9795 for non-real operands. */
9796 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9797 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9798 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9799 && operand_equal_p (arg0, arg1, 0))
9800 {
9801 tree r = build_real (TREE_TYPE (arg0), dconst1);
9802
9803 return omit_two_operands (type, r, arg0, arg1);
9804 }
9805
9806 /* The complex version of the above A / A optimization. */
9807 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9808 && operand_equal_p (arg0, arg1, 0))
9809 {
9810 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9811 if (! HONOR_NANS (TYPE_MODE (elem_type))
9812 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9813 {
9814 tree r = build_real (elem_type, dconst1);
9815 /* omit_two_operands will call fold_convert for us. */
9816 return omit_two_operands (type, r, arg0, arg1);
9817 }
9818 }
9819
9820 /* (-A) / (-B) -> A / B */
9821 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9822 return fold_build2 (RDIV_EXPR, type,
9823 TREE_OPERAND (arg0, 0),
9824 negate_expr (arg1));
9825 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9826 return fold_build2 (RDIV_EXPR, type,
9827 negate_expr (arg0),
9828 TREE_OPERAND (arg1, 0));
9829
9830 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9831 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9832 && real_onep (arg1))
9833 return non_lvalue (fold_convert (type, arg0));
9834
9835 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9836 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9837 && real_minus_onep (arg1))
9838 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9839
9840 /* If ARG1 is a constant, we can convert this to a multiply by the
9841 reciprocal. This does not have the same rounding properties,
9842 so only do this if -funsafe-math-optimizations. We can actually
9843 always safely do it if ARG1 is a power of two, but it's hard to
9844 tell if it is or not in a portable manner. */
9845 if (TREE_CODE (arg1) == REAL_CST)
9846 {
9847 if (flag_unsafe_math_optimizations
9848 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9849 arg1, 0)))
9850 return fold_build2 (MULT_EXPR, type, arg0, tem);
9851 /* Find the reciprocal if optimizing and the result is exact. */
9852 if (optimize)
9853 {
9854 REAL_VALUE_TYPE r;
9855 r = TREE_REAL_CST (arg1);
9856 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9857 {
9858 tem = build_real (type, r);
9859 return fold_build2 (MULT_EXPR, type,
9860 fold_convert (type, arg0), tem);
9861 }
9862 }
9863 }
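/* Editorial note (not from the original source): x / 5.0 becomes
   x * 0.2 only under -funsafe-math-optimizations, since 0.2 is not
   exactly representable in binary. x / 4.0 always becomes x * 0.25,
   because 4.0 is a power of two and exact_real_inverse succeeds. */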
9864 /* Convert A/B/C to A/(B*C). */
9865 if (flag_unsafe_math_optimizations
9866 && TREE_CODE (arg0) == RDIV_EXPR)
9867 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9868 fold_build2 (MULT_EXPR, type,
9869 TREE_OPERAND (arg0, 1), arg1));
9870
9871 /* Convert A/(B/C) to (A/B)*C. */
9872 if (flag_unsafe_math_optimizations
9873 && TREE_CODE (arg1) == RDIV_EXPR)
9874 return fold_build2 (MULT_EXPR, type,
9875 fold_build2 (RDIV_EXPR, type, arg0,
9876 TREE_OPERAND (arg1, 0)),
9877 TREE_OPERAND (arg1, 1));
9878
9879 /* Convert C1/(X*C2) into (C1/C2)/X. */
9880 if (flag_unsafe_math_optimizations
9881 && TREE_CODE (arg1) == MULT_EXPR
9882 && TREE_CODE (arg0) == REAL_CST
9883 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9884 {
9885 tree tem = const_binop (RDIV_EXPR, arg0,
9886 TREE_OPERAND (arg1, 1), 0);
9887 if (tem)
9888 return fold_build2 (RDIV_EXPR, type, tem,
9889 TREE_OPERAND (arg1, 0));
9890 }
9891
9892 if (flag_unsafe_math_optimizations)
9893 {
9894 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9895 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9896
9897 /* Optimize sin(x)/cos(x) as tan(x). */
9898 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9899 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9900 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9901 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9902 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9903 {
9904 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9905
9906 if (tanfn != NULL_TREE)
9907 return build_function_call_expr (tanfn,
9908 TREE_OPERAND (arg0, 1));
9909 }
9910
9911 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9912 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9913 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9914 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9915 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9916 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9917 {
9918 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9919
9920 if (tanfn != NULL_TREE)
9921 {
9922 tree tmp = TREE_OPERAND (arg0, 1);
9923 tmp = build_function_call_expr (tanfn, tmp);
9924 return fold_build2 (RDIV_EXPR, type,
9925 build_real (type, dconst1), tmp);
9926 }
9927 }
9928
9929 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9930 NaNs or Infinities. */
9931 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9932 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9933 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9934 {
9935 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9936 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9937
9938 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9939 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9940 && operand_equal_p (arg00, arg01, 0))
9941 {
9942 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9943
9944 if (cosfn != NULL_TREE)
9945 return build_function_call_expr (cosfn,
9946 TREE_OPERAND (arg0, 1));
9947 }
9948 }
9949
9950 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9951 NaNs or Infinities. */
9952 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9953 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9954 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9955 {
9956 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9957 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9958
9959 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9960 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9961 && operand_equal_p (arg00, arg01, 0))
9962 {
9963 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9964
9965 if (cosfn != NULL_TREE)
9966 {
9967 tree tmp = TREE_OPERAND (arg0, 1);
9968 tmp = build_function_call_expr (cosfn, tmp);
9969 return fold_build2 (RDIV_EXPR, type,
9970 build_real (type, dconst1),
9971 tmp);
9972 }
9973 }
9974 }
9975
9976 /* Optimize pow(x,c)/x as pow(x,c-1). */
9977 if (fcode0 == BUILT_IN_POW
9978 || fcode0 == BUILT_IN_POWF
9979 || fcode0 == BUILT_IN_POWL)
9980 {
9981 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9982 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9983 if (TREE_CODE (arg01) == REAL_CST
9984 && ! TREE_CONSTANT_OVERFLOW (arg01)
9985 && operand_equal_p (arg1, arg00, 0))
9986 {
9987 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9988 REAL_VALUE_TYPE c;
9989 tree arg, arglist;
9990
9991 c = TREE_REAL_CST (arg01);
9992 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9993 arg = build_real (type, c);
9994 arglist = build_tree_list (NULL_TREE, arg);
9995 arglist = tree_cons (NULL_TREE, arg1, arglist);
9996 return build_function_call_expr (powfn, arglist);
9997 }
9998 }
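/* Editorial note (not from the original source): under
   -funsafe-math-optimizations, pow (x, 3.0) / x folds to pow (x, 2.0);
   the constant exponent is decremented with real_arithmetic and the
   call is rebuilt with the new argument list. */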
9999
10000 /* Optimize x/expN(y) into x*expN(-y). */
10001 if (BUILTIN_EXPONENT_P (fcode1))
10002 {
10003 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10004 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10005 tree arglist = build_tree_list (NULL_TREE,
10006 fold_convert (type, arg));
10007 arg1 = build_function_call_expr (expfn, arglist);
10008 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10009 }
10010
10011 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10012 if (fcode1 == BUILT_IN_POW
10013 || fcode1 == BUILT_IN_POWF
10014 || fcode1 == BUILT_IN_POWL)
10015 {
10016 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10017 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10018 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10019 tree neg11 = fold_convert (type, negate_expr (arg11));
10020 tree arglist = tree_cons (NULL_TREE, arg10,
10021 build_tree_list (NULL_TREE, neg11));
10022 arg1 = build_function_call_expr (powfn, arglist);
10023 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10024 }
10025 }
10026 return NULL_TREE;
10027
10028 case TRUNC_DIV_EXPR:
10029 case FLOOR_DIV_EXPR:
10030 /* Simplify A / (B << N) where A and B are positive and B is
10031 a power of 2, to A >> (N + log2(B)). */
10032 strict_overflow_p = false;
10033 if (TREE_CODE (arg1) == LSHIFT_EXPR
10034 && (TYPE_UNSIGNED (type)
10035 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10036 {
10037 tree sval = TREE_OPERAND (arg1, 0);
10038 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10039 {
10040 tree sh_cnt = TREE_OPERAND (arg1, 1);
10041 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10042
10043 if (strict_overflow_p)
10044 fold_overflow_warning (("assuming signed overflow does not "
10045 "occur when simplifying A / (B << N)"),
10046 WARN_STRICT_OVERFLOW_MISC);
10047
10048 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10049 sh_cnt, build_int_cst (NULL_TREE, pow2));
10050 return fold_build2 (RSHIFT_EXPR, type,
10051 fold_convert (type, arg0), sh_cnt);
10052 }
10053 }
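/* Worked example (editorial illustration, not from the original source):
   for unsigned A, A / (2 << 3) is A / 16, which is rewritten as
   A >> (3 + log2 (2)) = A >> 4. */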
10054 /* Fall thru */
10055
10056 case ROUND_DIV_EXPR:
10057 case CEIL_DIV_EXPR:
10058 case EXACT_DIV_EXPR:
10059 if (integer_onep (arg1))
10060 return non_lvalue (fold_convert (type, arg0));
10061 if (integer_zerop (arg1))
10062 return NULL_TREE;
10063 /* X / -1 is -X. */
10064 if (!TYPE_UNSIGNED (type)
10065 && TREE_CODE (arg1) == INTEGER_CST
10066 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10067 && TREE_INT_CST_HIGH (arg1) == -1)
10068 return fold_convert (type, negate_expr (arg0));
10069
10070 /* Convert -A / -B to A / B when the type is signed and overflow is
10071 undefined. */
10072 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10073 && TREE_CODE (arg0) == NEGATE_EXPR
10074 && negate_expr_p (arg1))
10075 {
10076 if (INTEGRAL_TYPE_P (type))
10077 fold_overflow_warning (("assuming signed overflow does not occur "
10078 "when distributing negation across "
10079 "division"),
10080 WARN_STRICT_OVERFLOW_MISC);
10081 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10082 negate_expr (arg1));
10083 }
10084 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10085 && TREE_CODE (arg1) == NEGATE_EXPR
10086 && negate_expr_p (arg0))
10087 {
10088 if (INTEGRAL_TYPE_P (type))
10089 fold_overflow_warning (("assuming signed overflow does not occur "
10090 "when distributing negation across "
10091 "division"),
10092 WARN_STRICT_OVERFLOW_MISC);
10093 return fold_build2 (code, type, negate_expr (arg0),
10094 TREE_OPERAND (arg1, 0));
10095 }
10096
10097 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10098 operation, EXACT_DIV_EXPR.
10099
10100 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10101 At one time the others generated faster code, but it's not clear that
10102 they still do after the last round of changes to the DIV code in expmed.c. */
10103 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10104 && multiple_of_p (type, arg0, arg1))
10105 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10106
10107 strict_overflow_p = false;
10108 if (TREE_CODE (arg1) == INTEGER_CST
10109 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10110 &strict_overflow_p)))
10111 {
10112 if (strict_overflow_p)
10113 fold_overflow_warning (("assuming signed overflow does not occur "
10114 "when simplifying division"),
10115 WARN_STRICT_OVERFLOW_MISC);
10116 return fold_convert (type, tem);
10117 }
10118
10119 return NULL_TREE;
10120
10121 case CEIL_MOD_EXPR:
10122 case FLOOR_MOD_EXPR:
10123 case ROUND_MOD_EXPR:
10124 case TRUNC_MOD_EXPR:
10125 /* X % 1 is always zero, but be sure to preserve any side
10126 effects in X. */
10127 if (integer_onep (arg1))
10128 return omit_one_operand (type, integer_zero_node, arg0);
10129
10130 /* X % 0, return X % 0 unchanged so that we can get the
10131 proper warnings and errors. */
10132 if (integer_zerop (arg1))
10133 return NULL_TREE;
10134
10135 /* 0 % X is always zero, but be sure to preserve any side
10136 effects in X. Place this after checking for X == 0. */
10137 if (integer_zerop (arg0))
10138 return omit_one_operand (type, integer_zero_node, arg1);
10139
10140 /* X % -1 is zero. */
10141 if (!TYPE_UNSIGNED (type)
10142 && TREE_CODE (arg1) == INTEGER_CST
10143 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10144 && TREE_INT_CST_HIGH (arg1) == -1)
10145 return omit_one_operand (type, integer_zero_node, arg0);
10146
10147 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10148 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10149 strict_overflow_p = false;
10150 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10151 && (TYPE_UNSIGNED (type)
10152 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10153 {
10154 tree c = arg1;
10155 /* Also optimize A % (C << N) where C is a power of 2,
10156 to A & ((C << N) - 1). */
10157 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10158 c = TREE_OPERAND (arg1, 0);
10159
10160 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10161 {
10162 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10163 arg1, integer_one_node);
10164 if (strict_overflow_p)
10165 fold_overflow_warning (("assuming signed overflow does not "
10166 "occur when simplifying "
10167 "X % (power of two)"),
10168 WARN_STRICT_OVERFLOW_MISC);
10169 return fold_build2 (BIT_AND_EXPR, type,
10170 fold_convert (type, arg0),
10171 fold_convert (type, mask));
10172 }
10173 }
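/* Worked example (editorial illustration, not from the original source):
   with X = 29 (11101) and C = 8, X % C = 5 and X & (C - 1)
   = 11101 & 00111 = 00101 = 5; for nonnegative X the low log2 (C)
   bits are exactly the remainder of division by C. */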
10174
10175 /* X % -C is the same as X % C. */
10176 if (code == TRUNC_MOD_EXPR
10177 && !TYPE_UNSIGNED (type)
10178 && TREE_CODE (arg1) == INTEGER_CST
10179 && !TREE_CONSTANT_OVERFLOW (arg1)
10180 && TREE_INT_CST_HIGH (arg1) < 0
10181 && !TYPE_OVERFLOW_TRAPS (type)
10182 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10183 && !sign_bit_p (arg1, arg1))
10184 return fold_build2 (code, type, fold_convert (type, arg0),
10185 fold_convert (type, negate_expr (arg1)));
10186
10187 /* X % -Y is the same as X % Y. */
10188 if (code == TRUNC_MOD_EXPR
10189 && !TYPE_UNSIGNED (type)
10190 && TREE_CODE (arg1) == NEGATE_EXPR
10191 && !TYPE_OVERFLOW_TRAPS (type))
10192 return fold_build2 (code, type, fold_convert (type, arg0),
10193 fold_convert (type, TREE_OPERAND (arg1, 0)));
10194
10195 if (TREE_CODE (arg1) == INTEGER_CST
10196 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10197 &strict_overflow_p)))
10198 {
10199 if (strict_overflow_p)
10200 fold_overflow_warning (("assuming signed overflow does not occur "
10201 "when simplifying modulos"),
10202 WARN_STRICT_OVERFLOW_MISC);
10203 return fold_convert (type, tem);
10204 }
10205
10206 return NULL_TREE;
10207
10208 case LROTATE_EXPR:
10209 case RROTATE_EXPR:
10210 if (integer_all_onesp (arg0))
10211 return omit_one_operand (type, arg0, arg1);
10212 goto shift;
10213
10214 case RSHIFT_EXPR:
10215 /* Optimize -1 >> x for arithmetic right shifts. */
10216 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10217 return omit_one_operand (type, arg0, arg1);
10218 /* ... fall through ... */
10219
10220 case LSHIFT_EXPR:
10221 shift:
10222 if (integer_zerop (arg1))
10223 return non_lvalue (fold_convert (type, arg0));
10224 if (integer_zerop (arg0))
10225 return omit_one_operand (type, arg0, arg1);
10226
10227 /* Since negative shift count is not well-defined,
10228 don't try to compute it in the compiler. */
10229 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10230 return NULL_TREE;
10231
10232 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10233 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10234 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10235 && host_integerp (TREE_OPERAND (arg0, 1), false)
10236 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10237 {
10238 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10239 + TREE_INT_CST_LOW (arg1));
10240
10241 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10242 being well defined. */
10243 if (low >= TYPE_PRECISION (type))
10244 {
10245 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10246 low = low % TYPE_PRECISION (type);
10247 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10248 return build_int_cst (type, 0);
10249 else
10250 low = TYPE_PRECISION (type) - 1;
10251 }
10252
10253 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10254 build_int_cst (type, low));
10255 }
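/* Worked example (editorial illustration, not from the original source):
   (x << 3) << 2 folds to x << 5. For a 32-bit type, (x << 20) << 15
   would need x << 35, which is out of range: a left shift (or any
   unsigned shift) folds to the constant 0, while a rotate count is
   reduced modulo the precision, as handled above. */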
10256
10257 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10258 into x & ((unsigned)-1 >> c) for unsigned types. */
10259 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10260 || (TYPE_UNSIGNED (type)
10261 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10262 && host_integerp (arg1, false)
10263 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10264 && host_integerp (TREE_OPERAND (arg0, 1), false)
10265 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10266 {
10267 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10268 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10269 tree lshift;
10270 tree arg00;
10271
10272 if (low0 == low1)
10273 {
10274 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10275
10276 lshift = build_int_cst (type, -1);
10277 lshift = int_const_binop (code, lshift, arg1, 0);
10278
10279 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10280 }
10281 }
10282
10283 /* Rewrite an LROTATE_EXPR by a constant into an
10284 RROTATE_EXPR by a new constant. */
10285 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10286 {
10287 tree tem = build_int_cst (NULL_TREE,
10288 GET_MODE_BITSIZE (TYPE_MODE (type)));
10289 tem = fold_convert (TREE_TYPE (arg1), tem);
10290 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10291 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10292 }
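/* Editorial note (not from the original source): rotating a 32-bit
   value left by 8 is the same as rotating it right by 32 - 8 = 24,
   so later code only needs to handle RROTATE_EXPR. */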
10293
10294 /* If we have a rotate of a bit operation with the rotate count and
10295 the second operand of the bit operation both constant,
10296 permute the two operations. */
10297 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10298 && (TREE_CODE (arg0) == BIT_AND_EXPR
10299 || TREE_CODE (arg0) == BIT_IOR_EXPR
10300 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10302 return fold_build2 (TREE_CODE (arg0), type,
10303 fold_build2 (code, type,
10304 TREE_OPERAND (arg0, 0), arg1),
10305 fold_build2 (code, type,
10306 TREE_OPERAND (arg0, 1), arg1));
10307
10308 /* Two consecutive rotates adding up to the width of the mode can
10309 be ignored. */
10310 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10311 && TREE_CODE (arg0) == RROTATE_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10313 && TREE_INT_CST_HIGH (arg1) == 0
10314 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10315 && ((TREE_INT_CST_LOW (arg1)
10316 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10317 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10318 return TREE_OPERAND (arg0, 0);
10319
10320 return NULL_TREE;
10321
10322 case MIN_EXPR:
10323 if (operand_equal_p (arg0, arg1, 0))
10324 return omit_one_operand (type, arg0, arg1);
10325 if (INTEGRAL_TYPE_P (type)
10326 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10327 return omit_one_operand (type, arg1, arg0);
10328 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10329 if (tem)
10330 return tem;
10331 goto associate;
10332
10333 case MAX_EXPR:
10334 if (operand_equal_p (arg0, arg1, 0))
10335 return omit_one_operand (type, arg0, arg1);
10336 if (INTEGRAL_TYPE_P (type)
10337 && TYPE_MAX_VALUE (type)
10338 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10339 return omit_one_operand (type, arg1, arg0);
10340 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10341 if (tem)
10342 return tem;
10343 goto associate;
10344
10345 case TRUTH_ANDIF_EXPR:
10346 /* Note that the operands of this must be ints
10347 and their values must be 0 or 1.
10348 ("true" is a fixed value perhaps depending on the language.) */
10349 /* If first arg is constant zero, return it. */
10350 if (integer_zerop (arg0))
10351 return fold_convert (type, arg0);
10352 case TRUTH_AND_EXPR:
10353 /* If either arg is constant true, drop it. */
10354 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10355 return non_lvalue (fold_convert (type, arg1));
10356 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10357 /* Preserve sequence points. */
10358 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10359 return non_lvalue (fold_convert (type, arg0));
10360 /* If second arg is constant zero, result is zero, but first arg
10361 must be evaluated. */
10362 if (integer_zerop (arg1))
10363 return omit_one_operand (type, arg1, arg0);
10364 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10365 case will be handled here. */
10366 if (integer_zerop (arg0))
10367 return omit_one_operand (type, arg0, arg1);
10368
10369 /* !X && X is always false. */
10370 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10372 return omit_one_operand (type, integer_zero_node, arg1);
10373 /* X && !X is always false. */
10374 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10375 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10376 return omit_one_operand (type, integer_zero_node, arg0);
10377
10378 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10379 means A >= Y && A != MAX, but in this case we know that
10380 A < X <= MAX. */
10381
10382 if (!TREE_SIDE_EFFECTS (arg0)
10383 && !TREE_SIDE_EFFECTS (arg1))
10384 {
10385 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10386 if (tem && !operand_equal_p (tem, arg0, 0))
10387 return fold_build2 (code, type, tem, arg1);
10388
10389 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10390 if (tem && !operand_equal_p (tem, arg1, 0))
10391 return fold_build2 (code, type, arg0, tem);
10392 }
10393
10394 truth_andor:
10395 /* We only do these simplifications if we are optimizing. */
10396 if (!optimize)
10397 return NULL_TREE;
10398
10399 /* Check for things like (A || B) && (A || C). We can convert this
10400 to A || (B && C). Note that either operator can be any of the four
10401 truth and/or operations and the transformation will still be
10402 valid. Also note that we only care about order for the
10403 ANDIF and ORIF operators. If B contains side effects, this
10404 might change the truth-value of A. */
10405 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10406 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10407 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10408 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10409 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10410 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10411 {
10412 tree a00 = TREE_OPERAND (arg0, 0);
10413 tree a01 = TREE_OPERAND (arg0, 1);
10414 tree a10 = TREE_OPERAND (arg1, 0);
10415 tree a11 = TREE_OPERAND (arg1, 1);
10416 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10417 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10418 && (code == TRUTH_AND_EXPR
10419 || code == TRUTH_OR_EXPR));
10420
10421 if (operand_equal_p (a00, a10, 0))
10422 return fold_build2 (TREE_CODE (arg0), type, a00,
10423 fold_build2 (code, type, a01, a11));
10424 else if (commutative && operand_equal_p (a00, a11, 0))
10425 return fold_build2 (TREE_CODE (arg0), type, a00,
10426 fold_build2 (code, type, a01, a10));
10427 else if (commutative && operand_equal_p (a01, a10, 0))
10428 return fold_build2 (TREE_CODE (arg0), type, a01,
10429 fold_build2 (code, type, a00, a11));
10430
10431 /* This case is tricky because we must either have commutative
10432 operators or else A10 must not have side-effects. */
10433
10434 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10435 && operand_equal_p (a01, a11, 0))
10436 return fold_build2 (TREE_CODE (arg0), type,
10437 fold_build2 (code, type, a00, a10),
10438 a01);
10439 }
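/* Editorial note (not from the original source): e.g. (A || B) && (A || C)
   folds to A || (B && C); if A is true both forms are true, and if A
   is false both reduce to B && C. The TREE_SIDE_EFFECTS guards above
   keep the rewrite from dropping or reordering an operand whose
   evaluation is observable. */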
10440
10441 /* See if we can build a range comparison. */
10442 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10443 return tem;
10444
10445 /* Check for the possibility of merging component references. If our
10446 lhs is another similar operation, try to merge its rhs with our
10447 rhs. Then try to merge our lhs and rhs. */
10448 if (TREE_CODE (arg0) == code
10449 && 0 != (tem = fold_truthop (code, type,
10450 TREE_OPERAND (arg0, 1), arg1)))
10451 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10452
10453 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10454 return tem;
10455
10456 return NULL_TREE;
10457
10458 case TRUTH_ORIF_EXPR:
10459 /* Note that the operands of this must be ints
10460 and their values must be 0 or true.
10461 ("true" is a fixed value perhaps depending on the language.) */
10462 /* If first arg is constant true, return it. */
10463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10464 return fold_convert (type, arg0);
10465 case TRUTH_OR_EXPR:
10466 /* If either arg is constant zero, drop it. */
10467 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10468 return non_lvalue (fold_convert (type, arg1));
10469 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10470 /* Preserve sequence points. */
10471 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10472 return non_lvalue (fold_convert (type, arg0));
10473 /* If second arg is constant true, result is true, but we must
10474 evaluate first arg. */
10475 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10476 return omit_one_operand (type, arg1, arg0);
10477 /* Likewise for first arg, but note this only occurs here for
10478 TRUTH_OR_EXPR. */
10479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10480 return omit_one_operand (type, arg0, arg1);
10481
10482 /* !X || X is always true. */
10483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10485 return omit_one_operand (type, integer_one_node, arg1);
10486 /* X || !X is always true. */
10487 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10489 return omit_one_operand (type, integer_one_node, arg0);
10490
10491 goto truth_andor;
10492
10493 case TRUTH_XOR_EXPR:
10494 /* If the second arg is constant zero, drop it. */
10495 if (integer_zerop (arg1))
10496 return non_lvalue (fold_convert (type, arg0));
10497 /* If the second arg is constant true, this is a logical inversion. */
10498 if (integer_onep (arg1))
10499 {
10500 /* Only call invert_truthvalue if operand is a truth value. */
10501 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10502 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10503 else
10504 tem = invert_truthvalue (arg0);
10505 return non_lvalue (fold_convert (type, tem));
10506 }
10507 /* Identical arguments cancel to zero. */
10508 if (operand_equal_p (arg0, arg1, 0))
10509 return omit_one_operand (type, integer_zero_node, arg0);
10510
10511 /* !X ^ X is always true. */
10512 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10514 return omit_one_operand (type, integer_one_node, arg1);
10515
10516 /* X ^ !X is always true. */
10517 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10519 return omit_one_operand (type, integer_one_node, arg0);
10520
10521 return NULL_TREE;
10522
10523 case EQ_EXPR:
10524 case NE_EXPR:
10525 tem = fold_comparison (code, type, op0, op1);
10526 if (tem != NULL_TREE)
10527 return tem;
10528
10529 /* bool_var != 0 becomes bool_var. */
10530 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10531 && code == NE_EXPR)
10532 return non_lvalue (fold_convert (type, arg0));
10533
10534 /* bool_var == 1 becomes bool_var. */
10535 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10536 && code == EQ_EXPR)
10537 return non_lvalue (fold_convert (type, arg0));
10538
10539 /* bool_var != 1 becomes !bool_var. */
10540 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10541 && code == NE_EXPR)
10542 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10543
10544 /* bool_var == 0 becomes !bool_var. */
10545 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10546 && code == EQ_EXPR)
10547 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10548
10549 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10550 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10551 && TREE_CODE (arg1) == INTEGER_CST)
10552 {
10553 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10554 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10555 fold_build1 (BIT_NOT_EXPR, cmp_type,
10556 fold_convert (cmp_type, arg1)));
10557 }
10558
10559 /* If this is an equality comparison of the address of a non-weak
10560 object against zero, then we know the result. */
10561 if (TREE_CODE (arg0) == ADDR_EXPR
10562 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10563 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10564 && integer_zerop (arg1))
10565 return constant_boolean_node (code != EQ_EXPR, type);
10566
10567 /* If this is an equality comparison of the address of two non-weak,
10568 unaliased symbols neither of which are extern (since we do not
10569 have access to attributes for externs), then we know the result. */
10570 if (TREE_CODE (arg0) == ADDR_EXPR
10571 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10572 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10573 && ! lookup_attribute ("alias",
10574 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10575 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10576 && TREE_CODE (arg1) == ADDR_EXPR
10577 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10578 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10579 && ! lookup_attribute ("alias",
10580 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10581 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10582 {
10583 /* We know that we're looking at the address of two
10584 non-weak, unaliased, static _DECL nodes.
10585
10586 It is both wasteful and incorrect to call operand_equal_p
10587 to compare the two ADDR_EXPR nodes. It is wasteful in that
10588 all we need to do is test pointer equality for the arguments
10589 to the two ADDR_EXPR nodes. It is incorrect to use
10590 operand_equal_p as that function is NOT equivalent to a
10591 C equality test. It can in fact return false for two
10592 objects which would test as equal using the C equality
10593 operator. */
10594 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10595 return constant_boolean_node (equal
10596 ? code == EQ_EXPR : code != EQ_EXPR,
10597 type);
10598 }
10599
10600 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10601 a MINUS_EXPR of a constant, we can convert it into a comparison with
10602 a revised constant as long as no overflow occurs. */
10603 if (TREE_CODE (arg1) == INTEGER_CST
10604 && (TREE_CODE (arg0) == PLUS_EXPR
10605 || TREE_CODE (arg0) == MINUS_EXPR)
10606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10607 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10608 ? MINUS_EXPR : PLUS_EXPR,
10609 fold_convert (TREE_TYPE (arg0), arg1),
10610 TREE_OPERAND (arg0, 1), 0))
10611 && ! TREE_CONSTANT_OVERFLOW (tem))
10612 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10613
10614 /* Similarly for a NEGATE_EXPR. */
10615 if (TREE_CODE (arg0) == NEGATE_EXPR
10616 && TREE_CODE (arg1) == INTEGER_CST
10617 && 0 != (tem = negate_expr (arg1))
10618 && TREE_CODE (tem) == INTEGER_CST
10619 && ! TREE_CONSTANT_OVERFLOW (tem))
10620 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10621
10622 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10623 for !=. Don't do this for ordered comparisons due to overflow. */
10624 if (TREE_CODE (arg0) == MINUS_EXPR
10625 && integer_zerop (arg1))
10626 return fold_build2 (code, type,
10627 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10628
10629 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10630 if (TREE_CODE (arg0) == ABS_EXPR
10631 && (integer_zerop (arg1) || real_zerop (arg1)))
10632 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10633
10634 /* If this is an EQ or NE comparison with zero and ARG0 is
10635 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10636 two operations, but the latter can be done in one less insn
10637 on machines that have only two-operand insns or on which a
10638 constant cannot be the first operand. */
10639 if (TREE_CODE (arg0) == BIT_AND_EXPR
10640 && integer_zerop (arg1))
10641 {
10642 tree arg00 = TREE_OPERAND (arg0, 0);
10643 tree arg01 = TREE_OPERAND (arg0, 1);
10644 if (TREE_CODE (arg00) == LSHIFT_EXPR
10645 && integer_onep (TREE_OPERAND (arg00, 0)))
10646 return
10647 fold_build2 (code, type,
10648 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10649 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10650 arg01, TREE_OPERAND (arg00, 1)),
10651 fold_convert (TREE_TYPE (arg0),
10652 integer_one_node)),
10653 arg1);
10654 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10655 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10656 return
10657 fold_build2 (code, type,
10658 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10659 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10660 arg00, TREE_OPERAND (arg01, 1)),
10661 fold_convert (TREE_TYPE (arg0),
10662 integer_one_node)),
10663 arg1);
10664 }
10665
10666 /* If this is an NE or EQ comparison of zero against the result of a
10667 signed MOD operation whose second operand is a power of 2, make
10668 the MOD operation unsigned since it is simpler and equivalent. */
10669 if (integer_zerop (arg1)
10670 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10671 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10672 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10673 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10674 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10675 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10676 {
10677 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10678 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10679 fold_convert (newtype,
10680 TREE_OPERAND (arg0, 0)),
10681 fold_convert (newtype,
10682 TREE_OPERAND (arg0, 1)));
10683
10684 return fold_build2 (code, type, newmod,
10685 fold_convert (newtype, arg1));
10686 }
10687
10688 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10689 C1 is a valid shift constant, and C2 is a power of two, i.e.
10690 a single bit. */
10691 if (TREE_CODE (arg0) == BIT_AND_EXPR
10692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10693 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10694 == INTEGER_CST
10695 && integer_pow2p (TREE_OPERAND (arg0, 1))
10696 && integer_zerop (arg1))
10697 {
10698 tree itype = TREE_TYPE (arg0);
10699 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10700 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10701
10702 /* Check for a valid shift count. */
10703 if (TREE_INT_CST_HIGH (arg001) == 0
10704 && TREE_INT_CST_LOW (arg001) < prec)
10705 {
10706 tree arg01 = TREE_OPERAND (arg0, 1);
10707 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10708 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10709 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10710 can be rewritten as (X & (C2 << C1)) != 0. */
10711 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10712 {
10713 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10714 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10715 return fold_build2 (code, type, tem, arg1);
10716 }
10717 /* Otherwise, for signed (arithmetic) shifts,
10718 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10719 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10720 else if (!TYPE_UNSIGNED (itype))
10721 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10722 arg000, build_int_cst (itype, 0));
10723 /* Otherwise, for unsigned (logical) shifts,
10724 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10725 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10726 else
10727 return omit_one_operand (type,
10728 code == EQ_EXPR ? integer_one_node
10729 : integer_zero_node,
10730 arg000);
10731 }
10732 }
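/* Worked example (editorial illustration, not from the original source):
   ((X >> 2) & 4) != 0 tests bit 4 of X, so it becomes
   (X & (4 << 2)) != 0, i.e. (X & 16) != 0, while the mask still fits
   the precision. When C2 << C1 would overflow, a signed X degenerates
   to the sign test X < 0 (for !=) or X >= 0 (for ==), as above. */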
10733
10734 /* If this is an NE comparison of zero with an AND of one, remove the
10735 comparison since the AND will give the correct value. */
10736 if (code == NE_EXPR
10737 && integer_zerop (arg1)
10738 && TREE_CODE (arg0) == BIT_AND_EXPR
10739 && integer_onep (TREE_OPERAND (arg0, 1)))
10740 return fold_convert (type, arg0);
10741
10742 /* If we have (A & C) == C where C is a power of 2, convert this into
10743 (A & C) != 0. Similarly for NE_EXPR. */
10744 if (TREE_CODE (arg0) == BIT_AND_EXPR
10745 && integer_pow2p (TREE_OPERAND (arg0, 1))
10746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10747 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10748 arg0, fold_convert (TREE_TYPE (arg0),
10749 integer_zero_node));
10750
10751 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10752 bit, then fold the expression into A < 0 or A >= 0. */
10753 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10754 if (tem)
10755 return tem;
10756
10757 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10758 Similarly for NE_EXPR. */
10759 if (TREE_CODE (arg0) == BIT_AND_EXPR
10760 && TREE_CODE (arg1) == INTEGER_CST
10761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10762 {
10763 tree notc = fold_build1 (BIT_NOT_EXPR,
10764 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10765 TREE_OPERAND (arg0, 1));
10766 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10767 arg1, notc);
10768 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10769 if (integer_nonzerop (dandnotc))
10770 return omit_one_operand (type, rslt, arg0);
10771 }
10772
10773 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10774 Similarly for NE_EXPR. */
10775 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10776 && TREE_CODE (arg1) == INTEGER_CST
10777 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10778 {
10779 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10780 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10781 TREE_OPERAND (arg0, 1), notd);
10782 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10783 if (integer_nonzerop (candnotd))
10784 return omit_one_operand (type, rslt, arg0);
10785 }
10786
10787 /* If this is a comparison of a field, we may be able to simplify it. */
10788 if (((TREE_CODE (arg0) == COMPONENT_REF
10789 && lang_hooks.can_use_bit_fields_p ())
10790 || TREE_CODE (arg0) == BIT_FIELD_REF)
10791 /* Handle the constant case even without -O
10792 to make sure the warnings are given. */
10793 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10794 {
10795 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10796 if (t1)
10797 return t1;
10798 }
10799
10800 /* Optimize comparisons of strlen vs zero to a compare of the
10801 first character of the string vs zero. To wit,
10802 strlen(ptr) == 0 => *ptr == 0
10803 strlen(ptr) != 0 => *ptr != 0
10804 Other cases should reduce to one of these two (or a constant)
10805 due to the return value of strlen being unsigned. */
10806 if (TREE_CODE (arg0) == CALL_EXPR
10807 && integer_zerop (arg1))
10808 {
10809 tree fndecl = get_callee_fndecl (arg0);
10810 tree arglist;
10811
10812 if (fndecl
10813 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10814 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10815 && (arglist = TREE_OPERAND (arg0, 1))
10816 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10817 && ! TREE_CHAIN (arglist))
10818 {
10819 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10820 return fold_build2 (code, type, iref,
10821 build_int_cst (TREE_TYPE (iref), 0));
10822 }
10823 }
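/* Editorial note (not from the original source): strlen (ptr) == 0
   holds exactly when the first character is the terminating NUL, so
   the call is replaced by *ptr == 0 and the library call disappears. */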
10824
10825 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10826 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10827 if (TREE_CODE (arg0) == RSHIFT_EXPR
10828 && integer_zerop (arg1)
10829 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10830 {
10831 tree arg00 = TREE_OPERAND (arg0, 0);
10832 tree arg01 = TREE_OPERAND (arg0, 1);
10833 tree itype = TREE_TYPE (arg00);
10834 if (TREE_INT_CST_HIGH (arg01) == 0
10835 && TREE_INT_CST_LOW (arg01)
10836 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10837 {
10838 if (TYPE_UNSIGNED (itype))
10839 {
10840 itype = lang_hooks.types.signed_type (itype);
10841 arg00 = fold_convert (itype, arg00);
10842 }
10843 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10844 type, arg00, build_int_cst (itype, 0));
10845 }
10846 }
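/* Worked example (editorial illustration, not from the original source):
   for a 32-bit signed X, X >> 31 shifts in copies of the sign bit,
   giving 0 when X >= 0 and -1 when X < 0. Hence (X >> 31) != 0 is
   exactly X < 0 and (X >> 31) == 0 is X >= 0; an unsigned operand is
   first converted to the corresponding signed type, as above. */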
10847
10848 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10849 if (integer_zerop (arg1)
10850 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10851 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10852 TREE_OPERAND (arg0, 1));
10853
10854 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10855 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10857 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10858 build_int_cst (TREE_TYPE (arg1), 0));
10859 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10860 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10862 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10863 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10864 build_int_cst (TREE_TYPE (arg1), 0));
10865
10866 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10867 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10868 && TREE_CODE (arg1) == INTEGER_CST
10869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10870 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10871 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10872 TREE_OPERAND (arg0, 1), arg1));
10873
10874 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10875 (X & C) == 0 when C is a single bit. */
10876 if (TREE_CODE (arg0) == BIT_AND_EXPR
10877 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10878 && integer_zerop (arg1)
10879 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10880 {
10881 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10882 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10883 TREE_OPERAND (arg0, 1));
10884 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10885 type, tem, arg1);
10886 }
10887
10888 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10889 constant C is a power of two, i.e. a single bit. */
10890 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10892 && integer_zerop (arg1)
10893 && integer_pow2p (TREE_OPERAND (arg0, 1))
10894 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10895 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10896 {
10897 tree arg00 = TREE_OPERAND (arg0, 0);
10898 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10899 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10900 }
10901
10902 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10903 when C is a power of two, i.e. a single bit. */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10906 && integer_zerop (arg1)
10907 && integer_pow2p (TREE_OPERAND (arg0, 1))
10908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10909 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10910 {
10911 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10912 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10913 arg000, TREE_OPERAND (arg0, 1));
10914 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10915 tem, build_int_cst (TREE_TYPE (tem), 0));
10916 }
10917
10918 if (integer_zerop (arg1)
10919 && tree_expr_nonzero_p (arg0))
10920 {
10921 tree res = constant_boolean_node (code == NE_EXPR, type);
10922 return omit_one_operand (type, res, arg0);
10923 }
10924 return NULL_TREE;
10925
10926 case LT_EXPR:
10927 case GT_EXPR:
10928 case LE_EXPR:
10929 case GE_EXPR:
10930 tem = fold_comparison (code, type, op0, op1);
10931 if (tem != NULL_TREE)
10932 return tem;
10933
10934 /* Transform comparisons of the form X +- C CMP X. */
10935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10937 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10938 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10939 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10941 {
10942 tree arg01 = TREE_OPERAND (arg0, 1);
10943 enum tree_code code0 = TREE_CODE (arg0);
10944 int is_positive;
10945
10946 if (TREE_CODE (arg01) == REAL_CST)
10947 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10948 else
10949 is_positive = tree_int_cst_sgn (arg01);
10950
10951 /* (X - c) > X becomes false. */
10952 if (code == GT_EXPR
10953 && ((code0 == MINUS_EXPR && is_positive >= 0)
10954 || (code0 == PLUS_EXPR && is_positive <= 0)))
10955 {
10956 if (TREE_CODE (arg01) == INTEGER_CST
10957 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10958 fold_overflow_warning (("assuming signed overflow does not "
10959 "occur when assuming that (X - c) > X "
10960 "is always false"),
10961 WARN_STRICT_OVERFLOW_ALL);
10962 return constant_boolean_node (0, type);
10963 }
10964
10965 /* Likewise (X + c) < X becomes false. */
10966 if (code == LT_EXPR
10967 && ((code0 == PLUS_EXPR && is_positive >= 0)
10968 || (code0 == MINUS_EXPR && is_positive <= 0)))
10969 {
10970 if (TREE_CODE (arg01) == INTEGER_CST
10971 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10972 fold_overflow_warning (("assuming signed overflow does not "
10973 "occur when assuming that "
10974 "(X + c) < X is always false"),
10975 WARN_STRICT_OVERFLOW_ALL);
10976 return constant_boolean_node (0, type);
10977 }
10978
10979 /* Convert (X - c) <= X to true. */
10980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10981 && code == LE_EXPR
10982 && ((code0 == MINUS_EXPR && is_positive >= 0)
10983 || (code0 == PLUS_EXPR && is_positive <= 0)))
10984 {
10985 if (TREE_CODE (arg01) == INTEGER_CST
10986 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10987 fold_overflow_warning (("assuming signed overflow does not "
10988 "occur when assuming that "
10989 "(X - c) <= X is always true"),
10990 WARN_STRICT_OVERFLOW_ALL);
10991 return constant_boolean_node (1, type);
10992 }
10993
10994 /* Convert (X + c) >= X to true. */
10995 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10996 && code == GE_EXPR
10997 && ((code0 == PLUS_EXPR && is_positive >= 0)
10998 || (code0 == MINUS_EXPR && is_positive <= 0)))
10999 {
11000 if (TREE_CODE (arg01) == INTEGER_CST
11001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11002 fold_overflow_warning (("assuming signed overflow does not "
11003 "occur when assuming that "
11004 "(X + c) >= X is always true"),
11005 WARN_STRICT_OVERFLOW_ALL);
11006 return constant_boolean_node (1, type);
11007 }
11008
11009 if (TREE_CODE (arg01) == INTEGER_CST)
11010 {
11011 /* Convert X + c > X and X - c < X to true for integers. */
11012 if (code == GT_EXPR
11013 && ((code0 == PLUS_EXPR && is_positive > 0)
11014 || (code0 == MINUS_EXPR && is_positive < 0)))
11015 {
11016 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11017 fold_overflow_warning (("assuming signed overflow does "
11018 "not occur when assuming that "
11019 "(X + c) > X is always true"),
11020 WARN_STRICT_OVERFLOW_ALL);
11021 return constant_boolean_node (1, type);
11022 }
11023
11024 if (code == LT_EXPR
11025 && ((code0 == MINUS_EXPR && is_positive > 0)
11026 || (code0 == PLUS_EXPR && is_positive < 0)))
11027 {
11028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does "
11030 "not occur when assuming that "
11031 "(X - c) < X is always true"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (1, type);
11034 }
11035
11036 /* Convert X + c <= X and X - c >= X to false for integers. */
11037 if (code == LE_EXPR
11038 && ((code0 == PLUS_EXPR && is_positive > 0)
11039 || (code0 == MINUS_EXPR && is_positive < 0)))
11040 {
11041 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11042 fold_overflow_warning (("assuming signed overflow does "
11043 "not occur when assuming that "
11044 "(X + c) <= X is always false"),
11045 WARN_STRICT_OVERFLOW_ALL);
11046 return constant_boolean_node (0, type);
11047 }
11048
11049 if (code == GE_EXPR
11050 && ((code0 == MINUS_EXPR && is_positive > 0)
11051 || (code0 == PLUS_EXPR && is_positive < 0)))
11052 {
11053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054 fold_overflow_warning (("assuming signed overflow does "
11055 "not occur when assuming that "
11056 "(X - c) >= X is always true"),
11057 WARN_STRICT_OVERFLOW_ALL);
11058 return constant_boolean_node (0, type);
11059 }
11060 }
11061 }
11062
11063 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11064 This transformation affects the cases which are handled in later
11065 optimizations involving comparisons with non-negative constants. */
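 /* E.g. X >= 5 becomes X > 4 and X < 5 becomes X <= 4. */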
11066 if (TREE_CODE (arg1) == INTEGER_CST
11067 && TREE_CODE (arg0) != INTEGER_CST
11068 && tree_int_cst_sgn (arg1) > 0)
11069 {
11070 if (code == GE_EXPR)
11071 {
11072 arg1 = const_binop (MINUS_EXPR, arg1,
11073 build_int_cst (TREE_TYPE (arg1), 1), 0);
11074 return fold_build2 (GT_EXPR, type, arg0,
11075 fold_convert (TREE_TYPE (arg0), arg1));
11076 }
11077 if (code == LT_EXPR)
11078 {
11079 arg1 = const_binop (MINUS_EXPR, arg1,
11080 build_int_cst (TREE_TYPE (arg1), 1), 0);
11081 return fold_build2 (LE_EXPR, type, arg0,
11082 fold_convert (TREE_TYPE (arg0), arg1));
11083 }
11084 }
11085
11086 /* Comparisons with the highest or lowest possible integer of
11087 the specified size will have known values. */
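 /* E.g. for an 8-bit unsigned X, X > 255 is always false and
    X <= 255 is always true. */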
11088 {
11089 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11090
11091 if (TREE_CODE (arg1) == INTEGER_CST
11092 && ! TREE_CONSTANT_OVERFLOW (arg1)
11093 && width <= 2 * HOST_BITS_PER_WIDE_INT
11094 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11095 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11096 {
11097 HOST_WIDE_INT signed_max_hi;
11098 unsigned HOST_WIDE_INT signed_max_lo;
11099 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11100
11101 if (width <= HOST_BITS_PER_WIDE_INT)
11102 {
11103 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11104 - 1;
11105 signed_max_hi = 0;
11106 max_hi = 0;
11107
11108 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11109 {
11110 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11111 min_lo = 0;
11112 min_hi = 0;
11113 }
11114 else
11115 {
11116 max_lo = signed_max_lo;
11117 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11118 min_hi = -1;
11119 }
11120 }
11121 else
11122 {
11123 width -= HOST_BITS_PER_WIDE_INT;
11124 signed_max_lo = -1;
11125 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11126 - 1;
11127 max_lo = -1;
11128 min_lo = 0;
11129
11130 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11131 {
11132 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11133 min_hi = 0;
11134 }
11135 else
11136 {
11137 max_hi = signed_max_hi;
11138 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11139 }
11140 }
11141
11142 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11143 && TREE_INT_CST_LOW (arg1) == max_lo)
11144 switch (code)
11145 {
11146 case GT_EXPR:
11147 return omit_one_operand (type, integer_zero_node, arg0);
11148
11149 case GE_EXPR:
11150 return fold_build2 (EQ_EXPR, type, op0, op1);
11151
11152 case LE_EXPR:
11153 return omit_one_operand (type, integer_one_node, arg0);
11154
11155 case LT_EXPR:
11156 return fold_build2 (NE_EXPR, type, op0, op1);
11157
11158 /* The GE_EXPR and LT_EXPR cases above are not normally
11159 reached because of previous transformations. */
11160
11161 default:
11162 break;
11163 }
11164 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11165 == max_hi
11166 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11167 switch (code)
11168 {
11169 case GT_EXPR:
11170 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11171 return fold_build2 (EQ_EXPR, type,
11172 fold_convert (TREE_TYPE (arg1), arg0),
11173 arg1);
11174 case LE_EXPR:
11175 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11176 return fold_build2 (NE_EXPR, type,
11177 fold_convert (TREE_TYPE (arg1), arg0),
11178 arg1);
11179 default:
11180 break;
11181 }
11182 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11183 == min_hi
11184 && TREE_INT_CST_LOW (arg1) == min_lo)
11185 switch (code)
11186 {
11187 case LT_EXPR:
11188 return omit_one_operand (type, integer_zero_node, arg0);
11189
11190 case LE_EXPR:
11191 return fold_build2 (EQ_EXPR, type, op0, op1);
11192
11193 case GE_EXPR:
11194 return omit_one_operand (type, integer_one_node, arg0);
11195
11196 case GT_EXPR:
11197 return fold_build2 (NE_EXPR, type, op0, op1);
11198
11199 default:
11200 break;
11201 }
11202 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11203 == min_hi
11204 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11205 switch (code)
11206 {
11207 case GE_EXPR:
11208 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11209 return fold_build2 (NE_EXPR, type,
11210 fold_convert (TREE_TYPE (arg1), arg0),
11211 arg1);
11212 case LT_EXPR:
11213 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11214 return fold_build2 (EQ_EXPR, type,
11215 fold_convert (TREE_TYPE (arg1), arg0),
11216 arg1);
11217 default:
11218 break;
11219 }
11220
11221 else if (!in_gimple_form
11222 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11223 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11224 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11225 /* signed_type does not work on pointer types. */
11226 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11227 {
11228	      /* The following case also applies to X < signed_max+1
11229		 and X >= signed_max+1 because of previous transformations.  */
11230 if (code == LE_EXPR || code == GT_EXPR)
11231 {
11232 tree st;
11233 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11234 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11235 type, fold_convert (st, arg0),
11236 build_int_cst (st, 0));
11237 }
11238 }
11239 }
11240 }
11241
11242 /* If we are comparing an ABS_EXPR with a constant, we can
11243 convert all the cases into explicit comparisons, but they may
11244 well not be faster than doing the ABS and one comparison.
11245 But ABS (X) <= C is a range comparison, which becomes a subtraction
11246 and a comparison, and is probably faster. */
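 /* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5. */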
11247 if (code == LE_EXPR
11248 && TREE_CODE (arg1) == INTEGER_CST
11249 && TREE_CODE (arg0) == ABS_EXPR
11250 && ! TREE_SIDE_EFFECTS (arg0)
11251 && (0 != (tem = negate_expr (arg1)))
11252 && TREE_CODE (tem) == INTEGER_CST
11253 && ! TREE_CONSTANT_OVERFLOW (tem))
11254 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11255 build2 (GE_EXPR, type,
11256 TREE_OPERAND (arg0, 0), tem),
11257 build2 (LE_EXPR, type,
11258 TREE_OPERAND (arg0, 0), arg1));
11259
11260 /* Convert ABS_EXPR<x> >= 0 to true. */
11261 strict_overflow_p = false;
11262 if (code == GE_EXPR
11263 && (integer_zerop (arg1)
11264 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11265 && real_zerop (arg1)))
11266 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11267 {
11268 if (strict_overflow_p)
11269 fold_overflow_warning (("assuming signed overflow does not occur "
11270 "when simplifying comparison of "
11271 "absolute value and zero"),
11272 WARN_STRICT_OVERFLOW_CONDITIONAL);
11273 return omit_one_operand (type, integer_one_node, arg0);
11274 }
11275
11276 /* Convert ABS_EXPR<x> < 0 to false. */
11277 strict_overflow_p = false;
11278 if (code == LT_EXPR
11279 && (integer_zerop (arg1) || real_zerop (arg1))
11280 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11281 {
11282 if (strict_overflow_p)
11283 fold_overflow_warning (("assuming signed overflow does not occur "
11284 "when simplifying comparison of "
11285 "absolute value and zero"),
11286 WARN_STRICT_OVERFLOW_CONDITIONAL);
11287 return omit_one_operand (type, integer_zero_node, arg0);
11288 }
11289
11290 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11291 and similarly for >= into !=. */
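 /* E.g. for unsigned X, X < (1 << 3) holds iff the bits above the
    low three are zero, i.e. iff (X >> 3) == 0. */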
11292 if ((code == LT_EXPR || code == GE_EXPR)
11293 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11294 && TREE_CODE (arg1) == LSHIFT_EXPR
11295 && integer_onep (TREE_OPERAND (arg1, 0)))
11296 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11297 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11298 TREE_OPERAND (arg1, 1)),
11299 build_int_cst (TREE_TYPE (arg0), 0));
11300
11301 if ((code == LT_EXPR || code == GE_EXPR)
11302 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11303 && (TREE_CODE (arg1) == NOP_EXPR
11304 || TREE_CODE (arg1) == CONVERT_EXPR)
11305 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11306 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11307 return
11308 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11309 fold_convert (TREE_TYPE (arg0),
11310 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11311 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11312 1))),
11313 build_int_cst (TREE_TYPE (arg0), 0));
11314
11315 return NULL_TREE;
11316
11317 case UNORDERED_EXPR:
11318 case ORDERED_EXPR:
11319 case UNLT_EXPR:
11320 case UNLE_EXPR:
11321 case UNGT_EXPR:
11322 case UNGE_EXPR:
11323 case UNEQ_EXPR:
11324 case LTGT_EXPR:
11325 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11326 {
11327 t1 = fold_relational_const (code, type, arg0, arg1);
11328 if (t1 != NULL_TREE)
11329 return t1;
11330 }
11331
11332 /* If the first operand is NaN, the result is constant. */
11333 if (TREE_CODE (arg0) == REAL_CST
11334 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11335 && (code != LTGT_EXPR || ! flag_trapping_math))
11336 {
11337 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11338 ? integer_zero_node
11339 : integer_one_node;
11340 return omit_one_operand (type, t1, arg1);
11341 }
11342
11343 /* If the second operand is NaN, the result is constant. */
11344 if (TREE_CODE (arg1) == REAL_CST
11345 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11346 && (code != LTGT_EXPR || ! flag_trapping_math))
11347 {
11348 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11349 ? integer_zero_node
11350 : integer_one_node;
11351 return omit_one_operand (type, t1, arg0);
11352 }
11353
11354 /* Simplify unordered comparison of something with itself. */
11355 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11356 && operand_equal_p (arg0, arg1, 0))
11357 return constant_boolean_node (1, type);
11358
11359 if (code == LTGT_EXPR
11360 && !flag_trapping_math
11361 && operand_equal_p (arg0, arg1, 0))
11362 return constant_boolean_node (0, type);
11363
11364 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11365 {
11366 tree targ0 = strip_float_extensions (arg0);
11367 tree targ1 = strip_float_extensions (arg1);
11368 tree newtype = TREE_TYPE (targ0);
11369
11370 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11371 newtype = TREE_TYPE (targ1);
11372
11373 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11374 return fold_build2 (code, type, fold_convert (newtype, targ0),
11375 fold_convert (newtype, targ1));
11376 }
11377
11378 return NULL_TREE;
11379
11380 case COMPOUND_EXPR:
11381 /* When pedantic, a compound expression can be neither an lvalue
11382 nor an integer constant expression. */
11383 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11384 return NULL_TREE;
11385      /* Don't let (0, 0) be a null pointer constant.  */
11386 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11387 : fold_convert (type, arg1);
11388 return pedantic_non_lvalue (tem);
11389
11390 case COMPLEX_EXPR:
11391 if ((TREE_CODE (arg0) == REAL_CST
11392 && TREE_CODE (arg1) == REAL_CST)
11393 || (TREE_CODE (arg0) == INTEGER_CST
11394 && TREE_CODE (arg1) == INTEGER_CST))
11395 return build_complex (type, arg0, arg1);
11396 return NULL_TREE;
11397
11398 case ASSERT_EXPR:
11399 /* An ASSERT_EXPR should never be passed to fold_binary. */
11400 gcc_unreachable ();
11401
11402 default:
11403 return NULL_TREE;
11404 } /* switch (code) */
11405}
11406
11407/* Callback for walk_tree, looking for a LABEL_EXPR.
11408   Return *TP if it is a LABEL_EXPR; otherwise return NULL_TREE.
11409   Do not walk into the sub-tree of a GOTO_EXPR.  */
11410
11411static tree
11412contains_label_1 (tree *tp,
11413 int *walk_subtrees,
11414 void *data ATTRIBUTE_UNUSED)
11415{
11416 switch (TREE_CODE (*tp))
11417 {
11418 case LABEL_EXPR:
11419 return *tp;
11420 case GOTO_EXPR:
11421 *walk_subtrees = 0;
11422      /* FALLTHRU */
11423 default:
11424 return NULL_TREE;
11425 }
11426}
11427
11428/* Check whether the sub-tree ST contains a label which is accessible
11429   from outside the sub-tree.  Return true if such a label is found,
11430   false otherwise.  */
11431
11432static bool
11433contains_label_p (tree st)
11434{
11435 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11436}
11437
11438/* Fold a ternary expression of code CODE and type TYPE with operands
11439 OP0, OP1, and OP2. Return the folded expression if folding is
11440 successful. Otherwise, return NULL_TREE. */
11441
11442tree
11443fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11444{
11445 tree tem;
11446 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11447 enum tree_code_class kind = TREE_CODE_CLASS (code);
11448
11449 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11450 && TREE_CODE_LENGTH (code) == 3);
11451
11452 /* Strip any conversions that don't change the mode. This is safe
11453 for every expression, except for a comparison expression because
11454 its signedness is derived from its operands. So, in the latter
11455 case, only strip conversions that don't change the signedness.
11456
11457 Note that this is done as an internal manipulation within the
11458 constant folder, in order to find the simplest representation of
11459     the arguments so that their form can be studied.  In any case,
11460 the appropriate type conversions should be put back in the tree
11461 that will get out of the constant folder. */
11462 if (op0)
11463 {
11464 arg0 = op0;
11465 STRIP_NOPS (arg0);
11466 }
11467
11468 if (op1)
11469 {
11470 arg1 = op1;
11471 STRIP_NOPS (arg1);
11472 }
11473
11474 switch (code)
11475 {
11476 case COMPONENT_REF:
11477 if (TREE_CODE (arg0) == CONSTRUCTOR
11478 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11479 {
11480 unsigned HOST_WIDE_INT idx;
11481 tree field, value;
11482 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11483 if (field == arg1)
11484 return value;
11485 }
11486 return NULL_TREE;
11487
11488 case COND_EXPR:
11489 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11490 so all simple results must be passed through pedantic_non_lvalue. */
11491 if (TREE_CODE (arg0) == INTEGER_CST)
11492 {
11493 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11494 tem = integer_zerop (arg0) ? op2 : op1;
11495 /* Only optimize constant conditions when the selected branch
11496 has the same type as the COND_EXPR. This avoids optimizing
11497 away "c ? x : throw", where the throw has a void type.
11498	     Avoid throwing away an operand that contains a label.  */
11499 if ((!TREE_SIDE_EFFECTS (unused_op)
11500 || !contains_label_p (unused_op))
11501 && (! VOID_TYPE_P (TREE_TYPE (tem))
11502 || VOID_TYPE_P (type)))
11503 return pedantic_non_lvalue (tem);
11504 return NULL_TREE;
11505 }
11506 if (operand_equal_p (arg1, op2, 0))
11507 return pedantic_omit_one_operand (type, arg1, arg0);
11508
11509 /* If we have A op B ? A : C, we may be able to convert this to a
11510 simpler expression, depending on the operation and the values
11511 of B and C. Signed zeros prevent all of these transformations,
11512 for reasons given above each one.
11513
11514 Also try swapping the arguments and inverting the conditional. */
11515 if (COMPARISON_CLASS_P (arg0)
11516 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11517 arg1, TREE_OPERAND (arg0, 1))
11518 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11519 {
11520 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11521 if (tem)
11522 return tem;
11523 }
11524
11525 if (COMPARISON_CLASS_P (arg0)
11526 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11527 op2,
11528 TREE_OPERAND (arg0, 1))
11529 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11530 {
11531 tem = fold_truth_not_expr (arg0);
11532 if (tem && COMPARISON_CLASS_P (tem))
11533 {
11534 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11535 if (tem)
11536 return tem;
11537 }
11538 }
11539
11540 /* If the second operand is simpler than the third, swap them
11541 since that produces better jump optimization results. */
11542 if (truth_value_p (TREE_CODE (arg0))
11543 && tree_swap_operands_p (op1, op2, false))
11544 {
11545 /* See if this can be inverted. If it can't, possibly because
11546 it was a floating-point inequality comparison, don't do
11547 anything. */
11548 tem = fold_truth_not_expr (arg0);
11549 if (tem)
11550 return fold_build3 (code, type, tem, op2, op1);
11551 }
11552
11553 /* Convert A ? 1 : 0 to simply A. */
11554 if (integer_onep (op1)
11555 && integer_zerop (op2)
11556 /* If we try to convert OP0 to our type, the
11557 call to fold will try to move the conversion inside
11558 a COND, which will recurse. In that case, the COND_EXPR
11559 is probably the best choice, so leave it alone. */
11560 && type == TREE_TYPE (arg0))
11561 return pedantic_non_lvalue (arg0);
11562
11563 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11564 over COND_EXPR in cases such as floating point comparisons. */
11565 if (integer_zerop (op1)
11566 && integer_onep (op2)
11567 && truth_value_p (TREE_CODE (arg0)))
11568 return pedantic_non_lvalue (fold_convert (type,
11569 invert_truthvalue (arg0)));
11570
11571 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
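 /* E.g. for a 32-bit int A, A < 0 ? 0x80000000 : 0 equals
    (A & 0x80000000), since A < 0 iff the sign bit is set. */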
11572 if (TREE_CODE (arg0) == LT_EXPR
11573 && integer_zerop (TREE_OPERAND (arg0, 1))
11574 && integer_zerop (op2)
11575 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11576 {
11577	  /* sign_bit_p only checks ARG1 bits within A's precision.
11578	     If <sign bit of A> has a wider type than A, the bits outside
11579	     A's precision in <sign bit of A> need to be checked.
11580	     If they are all 0, this optimization must be done
11581	     in A's unsigned type; if they are all 1, in A's signed type;
11582	     otherwise it can't be done.  */
11583 if (TYPE_PRECISION (TREE_TYPE (tem))
11584 < TYPE_PRECISION (TREE_TYPE (arg1))
11585 && TYPE_PRECISION (TREE_TYPE (tem))
11586 < TYPE_PRECISION (type))
11587 {
11588 unsigned HOST_WIDE_INT mask_lo;
11589 HOST_WIDE_INT mask_hi;
11590 int inner_width, outer_width;
11591 tree tem_type;
11592
11593 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11594 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11595 if (outer_width > TYPE_PRECISION (type))
11596 outer_width = TYPE_PRECISION (type);
11597
11598 if (outer_width > HOST_BITS_PER_WIDE_INT)
11599 {
11600 mask_hi = ((unsigned HOST_WIDE_INT) -1
11601 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11602 mask_lo = -1;
11603 }
11604 else
11605 {
11606 mask_hi = 0;
11607 mask_lo = ((unsigned HOST_WIDE_INT) -1
11608 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11609 }
11610 if (inner_width > HOST_BITS_PER_WIDE_INT)
11611 {
11612 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11613 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11614 mask_lo = 0;
11615 }
11616 else
11617 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11618 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11619
11620 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11621 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11622 {
11623 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11624 tem = fold_convert (tem_type, tem);
11625 }
11626 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11627 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11628 {
11629 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11630 tem = fold_convert (tem_type, tem);
11631 }
11632 else
11633 tem = NULL;
11634 }
11635
11636 if (tem)
11637 return fold_convert (type,
11638 fold_build2 (BIT_AND_EXPR,
11639 TREE_TYPE (tem), tem,
11640 fold_convert (TREE_TYPE (tem),
11641 arg1)));
11642 }
11643
11644 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11645 already handled above. */
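 /* E.g. with N = 2: (A >> 2) & 1 ? 4 : 0 equals A & 4, selecting
    bit 2 of A. */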
11646 if (TREE_CODE (arg0) == BIT_AND_EXPR
11647 && integer_onep (TREE_OPERAND (arg0, 1))
11648 && integer_zerop (op2)
11649 && integer_pow2p (arg1))
11650 {
11651 tree tem = TREE_OPERAND (arg0, 0);
11652 STRIP_NOPS (tem);
11653 if (TREE_CODE (tem) == RSHIFT_EXPR
11654 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11655 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11656 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11657 return fold_build2 (BIT_AND_EXPR, type,
11658 TREE_OPERAND (tem, 0), arg1);
11659 }
11660
11661 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11662 is probably obsolete because the first operand should be a
11663 truth value (that's why we have the two cases above), but let's
11664 leave it in until we can confirm this for all front-ends. */
11665 if (integer_zerop (op2)
11666 && TREE_CODE (arg0) == NE_EXPR
11667 && integer_zerop (TREE_OPERAND (arg0, 1))
11668 && integer_pow2p (arg1)
11669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11670 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11671 arg1, OEP_ONLY_CONST))
11672 return pedantic_non_lvalue (fold_convert (type,
11673 TREE_OPERAND (arg0, 0)));
11674
11675 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11676 if (integer_zerop (op2)
11677 && truth_value_p (TREE_CODE (arg0))
11678 && truth_value_p (TREE_CODE (arg1)))
11679 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11680 fold_convert (type, arg0),
11681 arg1);
11682
11683 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11684 if (integer_onep (op2)
11685 && truth_value_p (TREE_CODE (arg0))
11686 && truth_value_p (TREE_CODE (arg1)))
11687 {
11688 /* Only perform transformation if ARG0 is easily inverted. */
11689 tem = fold_truth_not_expr (arg0);
11690 if (tem)
11691 return fold_build2 (TRUTH_ORIF_EXPR, type,
11692 fold_convert (type, tem),
11693 arg1);
11694 }
11695
11696 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11697 if (integer_zerop (arg1)
11698 && truth_value_p (TREE_CODE (arg0))
11699 && truth_value_p (TREE_CODE (op2)))
11700 {
11701 /* Only perform transformation if ARG0 is easily inverted. */
11702 tem = fold_truth_not_expr (arg0);
11703 if (tem)
11704 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11705 fold_convert (type, tem),
11706 op2);
11707 }
11708
11709 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11710 if (integer_onep (arg1)
11711 && truth_value_p (TREE_CODE (arg0))
11712 && truth_value_p (TREE_CODE (op2)))
11713 return fold_build2 (TRUTH_ORIF_EXPR, type,
11714 fold_convert (type, arg0),
11715 op2);
11716
11717 return NULL_TREE;
11718
11719 case CALL_EXPR:
11720 /* Check for a built-in function. */
11721 if (TREE_CODE (op0) == ADDR_EXPR
11722 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11723 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11724 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11725 return NULL_TREE;
11726
11727 case BIT_FIELD_REF:
11728 if (TREE_CODE (arg0) == VECTOR_CST
11729 && type == TREE_TYPE (TREE_TYPE (arg0))
11730 && host_integerp (arg1, 1)
11731 && host_integerp (op2, 1))
11732 {
11733 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11734 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11735
11736 if (width != 0
11737 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11738 && (idx % width) == 0
11739 && (idx = idx / width)
11740 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11741 {
11742 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11743 while (idx-- > 0 && elements)
11744 elements = TREE_CHAIN (elements);
11745 if (elements)
11746 return TREE_VALUE (elements);
11747 else
11748 return fold_convert (type, integer_zero_node);
11749 }
11750 }
11751 return NULL_TREE;
11752
11753 default:
11754 return NULL_TREE;
11755 } /* switch (code) */
11756}
11757
11758/* Perform constant folding and related simplification of EXPR.
11759 The related simplifications include x*1 => x, x*0 => 0, etc.,
11760 and application of the associative law.
11761 NOP_EXPR conversions may be removed freely (as long as we
11762 are careful not to change the type of the overall expression).
11763 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11764 but we can constant-fold them if they have constant operands. */
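 /* A minimal sketch of the entry point's behavior (hypothetical use):
      fold (build2 (PLUS_EXPR, integer_type_node,
		    integer_one_node, integer_one_node))
    returns an INTEGER_CST for 2, while expressions that cannot be
    simplified are returned unchanged. */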
11765
11766#ifdef ENABLE_FOLD_CHECKING
11767# define fold(x) fold_1 (x)
11768static tree fold_1 (tree);
11769static
11770#endif
11771tree
11772fold (tree expr)
11773{
11774 const tree t = expr;
11775 enum tree_code code = TREE_CODE (t);
11776 enum tree_code_class kind = TREE_CODE_CLASS (code);
11777 tree tem;
11778
11779 /* Return right away if a constant. */
11780 if (kind == tcc_constant)
11781 return t;
11782
11783 if (IS_EXPR_CODE_CLASS (kind))
11784 {
11785 tree type = TREE_TYPE (t);
11786 tree op0, op1, op2;
11787
11788 switch (TREE_CODE_LENGTH (code))
11789 {
11790 case 1:
11791 op0 = TREE_OPERAND (t, 0);
11792 tem = fold_unary (code, type, op0);
11793 return tem ? tem : expr;
11794 case 2:
11795 op0 = TREE_OPERAND (t, 0);
11796 op1 = TREE_OPERAND (t, 1);
11797 tem = fold_binary (code, type, op0, op1);
11798 return tem ? tem : expr;
11799 case 3:
11800 op0 = TREE_OPERAND (t, 0);
11801 op1 = TREE_OPERAND (t, 1);
11802 op2 = TREE_OPERAND (t, 2);
11803 tem = fold_ternary (code, type, op0, op1, op2);
11804 return tem ? tem : expr;
11805 default:
11806 break;
11807 }
11808 }
11809
11810 switch (code)
11811 {
11812 case CONST_DECL:
11813 return fold (DECL_INITIAL (t));
11814
11815 default:
11816 return t;
11817 } /* switch (code) */
11818}
11819
11820#ifdef ENABLE_FOLD_CHECKING
11821#undef fold
11822
11823static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11824static void fold_check_failed (tree, tree);
11825void print_fold_checksum (tree);
11826
11827/* When --enable-checking=fold, compute a digest of EXPR before
11828   and after the actual fold call to verify that fold did not
11829   accidentally change the original EXPR.  */
11830
11831tree
11832fold (tree expr)
11833{
11834 tree ret;
11835 struct md5_ctx ctx;
11836 unsigned char checksum_before[16], checksum_after[16];
11837 htab_t ht;
11838
11839 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11840 md5_init_ctx (&ctx);
11841 fold_checksum_tree (expr, &ctx, ht);
11842 md5_finish_ctx (&ctx, checksum_before);
11843 htab_empty (ht);
11844
11845 ret = fold_1 (expr);
11846
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (expr, &ctx, ht);
11849 md5_finish_ctx (&ctx, checksum_after);
11850 htab_delete (ht);
11851
11852 if (memcmp (checksum_before, checksum_after, 16))
11853 fold_check_failed (expr, ret);
11854
11855 return ret;
11856}
11857
11858void
11859print_fold_checksum (tree expr)
11860{
11861 struct md5_ctx ctx;
11862 unsigned char checksum[16], cnt;
11863 htab_t ht;
11864
11865 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11866 md5_init_ctx (&ctx);
11867 fold_checksum_tree (expr, &ctx, ht);
11868 md5_finish_ctx (&ctx, checksum);
11869 htab_delete (ht);
11870 for (cnt = 0; cnt < 16; ++cnt)
11871 fprintf (stderr, "%02x", checksum[cnt]);
11872 putc ('\n', stderr);
11873}
11874
11875static void
11876fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11877{
11878 internal_error ("fold check: original tree changed by fold");
11879}
11880
11881static void
11882fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11883{
11884 void **slot;
11885 enum tree_code code;
11886 struct tree_function_decl buf;
11887 int i, len;
11888
11889recursive_label:
11890
11891 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11892 <= sizeof (struct tree_function_decl))
11893 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11894 if (expr == NULL)
11895 return;
11896 slot = htab_find_slot (ht, expr, INSERT);
11897 if (*slot != NULL)
11898 return;
11899 *slot = expr;
11900 code = TREE_CODE (expr);
11901 if (TREE_CODE_CLASS (code) == tcc_declaration
11902 && DECL_ASSEMBLER_NAME_SET_P (expr))
11903 {
11904 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11905 memcpy ((char *) &buf, expr, tree_size (expr));
11906 expr = (tree) &buf;
11907 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11908 }
11909 else if (TREE_CODE_CLASS (code) == tcc_type
11910 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11911 || TYPE_CACHED_VALUES_P (expr)
11912 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11913 {
11914 /* Allow these fields to be modified. */
11915 memcpy ((char *) &buf, expr, tree_size (expr));
11916 expr = (tree) &buf;
11917 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11918 TYPE_POINTER_TO (expr) = NULL;
11919 TYPE_REFERENCE_TO (expr) = NULL;
11920 if (TYPE_CACHED_VALUES_P (expr))
11921 {
11922 TYPE_CACHED_VALUES_P (expr) = 0;
11923 TYPE_CACHED_VALUES (expr) = NULL;
11924 }
11925 }
11926 md5_process_bytes (expr, tree_size (expr), ctx);
11927 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11928 if (TREE_CODE_CLASS (code) != tcc_type
11929 && TREE_CODE_CLASS (code) != tcc_declaration
11930 && code != TREE_LIST)
11931 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11932 switch (TREE_CODE_CLASS (code))
11933 {
11934 case tcc_constant:
11935 switch (code)
11936 {
11937 case STRING_CST:
11938 md5_process_bytes (TREE_STRING_POINTER (expr),
11939 TREE_STRING_LENGTH (expr), ctx);
11940 break;
11941 case COMPLEX_CST:
11942 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11943 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11944 break;
11945 case VECTOR_CST:
11946 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11947 break;
11948 default:
11949 break;
11950 }
11951 break;
11952 case tcc_exceptional:
11953 switch (code)
11954 {
11955 case TREE_LIST:
11956 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11957 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11958 expr = TREE_CHAIN (expr);
11959 goto recursive_label;
11960 break;
11961 case TREE_VEC:
11962 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11963 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11964 break;
11965 default:
11966 break;
11967 }
11968 break;
11969 case tcc_expression:
11970 case tcc_reference:
11971 case tcc_comparison:
11972 case tcc_unary:
11973 case tcc_binary:
11974 case tcc_statement:
11975 len = TREE_CODE_LENGTH (code);
11976 for (i = 0; i < len; ++i)
11977 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11978 break;
11979 case tcc_declaration:
11980 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11981 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11982 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11983 {
11984 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11985 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11986 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11987 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11988 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11989 }
11990 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11991 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11992
11993 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11994 {
11995 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11996 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11997 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11998 }
11999 break;
12000 case tcc_type:
12001 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12002 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12003 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12004 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12005 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12006 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12007 if (INTEGRAL_TYPE_P (expr)
12008 || SCALAR_FLOAT_TYPE_P (expr))
12009 {
12010 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12011 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12012 }
12013 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12014 if (TREE_CODE (expr) == RECORD_TYPE
12015 || TREE_CODE (expr) == UNION_TYPE
12016 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12017 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12018 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12019 break;
12020 default:
12021 break;
12022 }
12023}
12024
12025#endif
12026
12027/* Fold a unary tree expression with code CODE of type TYPE with an
12028 operand OP0. Return a folded expression if successful. Otherwise,
12029 return a tree expression with code CODE of type TYPE with an
12030 operand OP0. */
12031
12032tree
12033fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12034{
12035 tree tem;
12036#ifdef ENABLE_FOLD_CHECKING
12037 unsigned char checksum_before[16], checksum_after[16];
12038 struct md5_ctx ctx;
12039 htab_t ht;
12040
12041 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12042 md5_init_ctx (&ctx);
12043 fold_checksum_tree (op0, &ctx, ht);
12044 md5_finish_ctx (&ctx, checksum_before);
12045 htab_empty (ht);
12046#endif
12047
12048 tem = fold_unary (code, type, op0);
12049 if (!tem)
12050 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12051
12052#ifdef ENABLE_FOLD_CHECKING
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (op0, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum_after);
12056 htab_delete (ht);
12057
12058 if (memcmp (checksum_before, checksum_after, 16))
12059 fold_check_failed (op0, tem);
12060#endif
12061 return tem;
12062}
12063
12064/* Fold a binary tree expression with code CODE of type TYPE with
12065 operands OP0 and OP1. Return a folded expression if successful.
12066 Otherwise, return a tree expression with code CODE of type TYPE
12067 with operands OP0 and OP1. */
12068
12069tree
12070fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12071 MEM_STAT_DECL)
12072{
12073 tree tem;
12074#ifdef ENABLE_FOLD_CHECKING
12075 unsigned char checksum_before_op0[16],
12076 checksum_before_op1[16],
12077 checksum_after_op0[16],
12078 checksum_after_op1[16];
12079 struct md5_ctx ctx;
12080 htab_t ht;
12081
12082 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12083 md5_init_ctx (&ctx);
12084 fold_checksum_tree (op0, &ctx, ht);
12085 md5_finish_ctx (&ctx, checksum_before_op0);
12086 htab_empty (ht);
12087
12088 md5_init_ctx (&ctx);
12089 fold_checksum_tree (op1, &ctx, ht);
12090 md5_finish_ctx (&ctx, checksum_before_op1);
12091 htab_empty (ht);
12092#endif
12093
12094 tem = fold_binary (code, type, op0, op1);
12095 if (!tem)
12096 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12097
12098#ifdef ENABLE_FOLD_CHECKING
12099 md5_init_ctx (&ctx);
12100 fold_checksum_tree (op0, &ctx, ht);
12101 md5_finish_ctx (&ctx, checksum_after_op0);
12102 htab_empty (ht);
12103
12104 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12105 fold_check_failed (op0, tem);
12106
12107 md5_init_ctx (&ctx);
12108 fold_checksum_tree (op1, &ctx, ht);
12109 md5_finish_ctx (&ctx, checksum_after_op1);
12110 htab_delete (ht);
12111
12112 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12113 fold_check_failed (op1, tem);
12114#endif
12115 return tem;
12116}
12117
12118/* Fold a ternary tree expression with code CODE of type TYPE with
12119 operands OP0, OP1, and OP2. Return a folded expression if
12120 successful. Otherwise, return a tree expression with code CODE of
12121 type TYPE with operands OP0, OP1, and OP2. */
12122
12123tree
12124fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12125 MEM_STAT_DECL)
12126{
12127 tree tem;
12128#ifdef ENABLE_FOLD_CHECKING
12129 unsigned char checksum_before_op0[16],
12130 checksum_before_op1[16],
12131 checksum_before_op2[16],
12132 checksum_after_op0[16],
12133 checksum_after_op1[16],
12134 checksum_after_op2[16];
12135 struct md5_ctx ctx;
12136 htab_t ht;
12137
12138 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12139 md5_init_ctx (&ctx);
12140 fold_checksum_tree (op0, &ctx, ht);
12141 md5_finish_ctx (&ctx, checksum_before_op0);
12142 htab_empty (ht);
12143
12144 md5_init_ctx (&ctx);
12145 fold_checksum_tree (op1, &ctx, ht);
12146 md5_finish_ctx (&ctx, checksum_before_op1);
12147 htab_empty (ht);
12148
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (op2, &ctx, ht);
12151 md5_finish_ctx (&ctx, checksum_before_op2);
12152 htab_empty (ht);
12153#endif
12154
12155 tem = fold_ternary (code, type, op0, op1, op2);
12156 if (!tem)
12157 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12158
12159#ifdef ENABLE_FOLD_CHECKING
12160 md5_init_ctx (&ctx);
12161 fold_checksum_tree (op0, &ctx, ht);
12162 md5_finish_ctx (&ctx, checksum_after_op0);
12163 htab_empty (ht);
12164
12165 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12166 fold_check_failed (op0, tem);
12167
12168 md5_init_ctx (&ctx);
12169 fold_checksum_tree (op1, &ctx, ht);
12170 md5_finish_ctx (&ctx, checksum_after_op1);
12171 htab_empty (ht);
12172
12173 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12174 fold_check_failed (op1, tem);
12175
12176 md5_init_ctx (&ctx);
12177 fold_checksum_tree (op2, &ctx, ht);
12178 md5_finish_ctx (&ctx, checksum_after_op2);
12179 htab_delete (ht);
12180
12181 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12182 fold_check_failed (op2, tem);
12183#endif
12184 return tem;
12185}
12186
12187/* Perform constant folding and related simplification of initializer
12188 expression EXPR. These behave identically to "fold_buildN" but ignore
12189 potential run-time traps and exceptions that fold must preserve. */
12190
12191#define START_FOLD_INIT \
12192 int saved_signaling_nans = flag_signaling_nans;\
12193 int saved_trapping_math = flag_trapping_math;\
12194 int saved_rounding_math = flag_rounding_math;\
12195 int saved_trapv = flag_trapv;\
12196 int saved_folding_initializer = folding_initializer;\
12197 flag_signaling_nans = 0;\
12198 flag_trapping_math = 0;\
12199 flag_rounding_math = 0;\
12200 flag_trapv = 0;\
12201 folding_initializer = 1;
12202
12203#define END_FOLD_INIT \
12204 flag_signaling_nans = saved_signaling_nans;\
12205 flag_trapping_math = saved_trapping_math;\
12206 flag_rounding_math = saved_rounding_math;\
12207 flag_trapv = saved_trapv;\
12208 folding_initializer = saved_folding_initializer;
12209
12210tree
12211fold_build1_initializer (enum tree_code code, tree type, tree op)
12212{
12213 tree result;
12214 START_FOLD_INIT;
12215
12216 result = fold_build1 (code, type, op);
12217
12218 END_FOLD_INIT;
12219 return result;
12220}
12221
12222tree
12223fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12224{
12225 tree result;
12226 START_FOLD_INIT;
12227
12228 result = fold_build2 (code, type, op0, op1);
12229
12230 END_FOLD_INIT;
12231 return result;
12232}
12233
12234tree
12235fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12236 tree op2)
12237{
12238 tree result;
12239 START_FOLD_INIT;
12240
12241 result = fold_build3 (code, type, op0, op1, op2);
12242
12243 END_FOLD_INIT;
12244 return result;
12245}
12246
12247#undef START_FOLD_INIT
12248#undef END_FOLD_INIT
12249
12250/* Determine whether the first argument is a multiple of the second argument.
12251   Return 0 if it is not, or if we cannot easily determine it to be.
12252
12253 An example of the sort of thing we care about (at this point; this routine
12254 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12255 fold cases do now) is discovering that
12256
12257 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12258
12259 is a multiple of
12260
12261 SAVE_EXPR (J * 8)
12262
12263 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12264
12265 This code also handles discovering that
12266
12267 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12268
12269 is a multiple of 8 so we don't have to worry about dealing with a
12270 possible remainder.
12271
12272 Note that we *look* inside a SAVE_EXPR only to determine how it was
12273 calculated; it is not safe for fold to do much of anything else with the
12274 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12275 at run time. For example, the latter example above *cannot* be implemented
12276 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12277 evaluation time of the original SAVE_EXPR is not necessarily the same at
12278 the time the new expression is evaluated. The only optimization of this
12279 sort that would be valid is changing
12280
12281 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12282
12283 divided by 8 to
12284
12285 SAVE_EXPR (I) * SAVE_EXPR (J)
12286
12287 (where the same SAVE_EXPR (J) is used in the original and the
12288 transformed version). */
12289
12290static int
12291multiple_of_p (tree type, tree top, tree bottom)
12292{
12293 if (operand_equal_p (top, bottom, 0))
12294 return 1;
12295
12296 if (TREE_CODE (type) != INTEGER_TYPE)
12297 return 0;
12298
12299 switch (TREE_CODE (top))
12300 {
12301 case BIT_AND_EXPR:
12302 /* Bitwise and provides a power of two multiple. If the mask is
12303 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
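 /* E.g. (X & -8) is a multiple of 8, because the mask -8 has its
    low three bits clear. */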
12304 if (!integer_pow2p (bottom))
12305 return 0;
12306 /* FALLTHRU */
12307
12308 case MULT_EXPR:
12309 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12310 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12311
12312 case PLUS_EXPR:
12313 case MINUS_EXPR:
12314 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12315 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12316
12317 case LSHIFT_EXPR:
12318 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12319 {
12320 tree op1, t1;
12321
12322 op1 = TREE_OPERAND (top, 1);
12323 /* const_binop may not detect overflow correctly,
12324 so check for it explicitly here. */
12325 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12326 > TREE_INT_CST_LOW (op1)
12327 && TREE_INT_CST_HIGH (op1) == 0
12328 && 0 != (t1 = fold_convert (type,
12329 const_binop (LSHIFT_EXPR,
12330 size_one_node,
12331 op1, 0)))
12332 && ! TREE_OVERFLOW (t1))
12333 return multiple_of_p (type, t1, bottom);
12334 }
12335 return 0;
12336
12337 case NOP_EXPR:
12338 /* Can't handle conversions from non-integral or wider integral type. */
12339 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12340 || (TYPE_PRECISION (type)
12341 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12342 return 0;
12343
12344      /* ... fall through ...  */
12345
12346 case SAVE_EXPR:
12347 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12348
12349 case INTEGER_CST:
12350 if (TREE_CODE (bottom) != INTEGER_CST
12351 || (TYPE_UNSIGNED (type)
12352 && (tree_int_cst_sgn (top) < 0
12353 || tree_int_cst_sgn (bottom) < 0)))
12354 return 0;
12355 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12356 top, bottom, 0));
12357
12358 default:
12359 return 0;
12360 }
12361}
12362
12363/* Return true if `t' is known to be non-negative. If the return
12364 value is based on the assumption that signed overflow is undefined,
12365 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12366 *STRICT_OVERFLOW_P. */
12367
12368int
12369tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12370{
12371 if (t == error_mark_node)
12372 return 0;
12373
12374 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12375 return 1;
12376
12377 switch (TREE_CODE (t))
12378 {
12379 case SSA_NAME:
12380 /* Query VRP to see if it has recorded any information about
12381 the range of this object. */
12382 return ssa_name_nonnegative_p (t);
12383
12384 case ABS_EXPR:
12385 /* We can't return 1 if flag_wrapv is set because
12386 ABS_EXPR<INT_MIN> = INT_MIN. */
12387 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12388 return 1;
12389 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12390 {
12391 *strict_overflow_p = true;
12392 return 1;
12393 }
12394 break;
12395
12396 case INTEGER_CST:
12397 return tree_int_cst_sgn (t) >= 0;
12398
12399 case REAL_CST:
12400 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12401
12402 case PLUS_EXPR:
12403 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12404 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12405 strict_overflow_p)
12406 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12407 strict_overflow_p));
12408
12409 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12410 both unsigned and at least 2 bits shorter than the result. */
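 /* E.g. (int) (unsigned short) a + (int) (unsigned short) b is at
    most 2*65535 = 131070, which fits in 32 bits, so the sum cannot
    wrap to a negative value. */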
12411 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12412 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12413 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12414 {
12415 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12416 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12417 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12418 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12419 {
12420 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12421 TYPE_PRECISION (inner2)) + 1;
12422 return prec < TYPE_PRECISION (TREE_TYPE (t));
12423 }
12424 }
12425 break;
12426
12427 case MULT_EXPR:
12428 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12429 {
12430 /* x * x for floating point x is always non-negative. */
12431 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12432 return 1;
12433 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12434 strict_overflow_p)
12435 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12436 strict_overflow_p));
12437 }
12438
12439      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12440	  both unsigned and their combined precision is less than the result's.  */
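 /* E.g. (int) (unsigned char) a * (int) (unsigned short) b is at
    most 255 * 65535 < 2^24, so the product fits in 32 bits and
    stays non-negative. */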
12441 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12442 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12443 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12444 {
12445 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12446 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12447 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12448 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12449 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12450 < TYPE_PRECISION (TREE_TYPE (t));
12451 }
12452 return 0;
12453
12454 case BIT_AND_EXPR:
12455 case MAX_EXPR:
12456 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12457 strict_overflow_p)
12458 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12459 strict_overflow_p));
12460
12461 case BIT_IOR_EXPR:
12462 case BIT_XOR_EXPR:
12463 case MIN_EXPR:
12464 case RDIV_EXPR:
12465 case TRUNC_DIV_EXPR:
12466 case CEIL_DIV_EXPR:
12467 case FLOOR_DIV_EXPR:
12468 case ROUND_DIV_EXPR:
12469 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12470 strict_overflow_p)
12471 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12472 strict_overflow_p));
12473
12474 case TRUNC_MOD_EXPR:
12475 case CEIL_MOD_EXPR:
12476 case FLOOR_MOD_EXPR:
12477 case ROUND_MOD_EXPR:
12478 case SAVE_EXPR:
12479 case NON_LVALUE_EXPR:
12480 case FLOAT_EXPR:
12481 case FIX_TRUNC_EXPR:
12482 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12483 strict_overflow_p);
12484
12485 case COMPOUND_EXPR:
12486 case MODIFY_EXPR:
12487 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12488 strict_overflow_p);
12489
12490 case BIND_EXPR:
12491 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12492 strict_overflow_p);
12493
12494 case COND_EXPR:
12495 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12496 strict_overflow_p)
12497 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12498 strict_overflow_p));
12499
12500 case NOP_EXPR:
12501 {
12502 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12503 tree outer_type = TREE_TYPE (t);
12504
12505 if (TREE_CODE (outer_type) == REAL_TYPE)
12506 {
12507 if (TREE_CODE (inner_type) == REAL_TYPE)
12508 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12509 strict_overflow_p);
12510 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12511 {
12512 if (TYPE_UNSIGNED (inner_type))
12513 return 1;
12514 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12515 strict_overflow_p);
12516 }
12517 }
12518 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12519 {
12520 if (TREE_CODE (inner_type) == REAL_TYPE)
12521	    return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12522 strict_overflow_p);
12523 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12524 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12525 && TYPE_UNSIGNED (inner_type);
12526 }
12527 }
12528 break;
12529
12530 case TARGET_EXPR:
12531 {
12532 tree temp = TARGET_EXPR_SLOT (t);
12533 t = TARGET_EXPR_INITIAL (t);
12534
12535 /* If the initializer is non-void, then it's a normal expression
12536 that will be assigned to the slot. */
12537 if (!VOID_TYPE_P (t))
12538 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12539
12540 /* Otherwise, the initializer sets the slot in some way. One common
12541 way is an assignment statement at the end of the initializer. */
12542 while (1)
12543 {
12544 if (TREE_CODE (t) == BIND_EXPR)
12545 t = expr_last (BIND_EXPR_BODY (t));
12546 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12547 || TREE_CODE (t) == TRY_CATCH_EXPR)
12548 t = expr_last (TREE_OPERAND (t, 0));
12549 else if (TREE_CODE (t) == STATEMENT_LIST)
12550 t = expr_last (t);
12551 else
12552 break;
12553 }
12554 if (TREE_CODE (t) == MODIFY_EXPR
12555 && TREE_OPERAND (t, 0) == temp)
12556 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12557 strict_overflow_p);
12558
12559 return 0;
12560 }
12561
12562 case CALL_EXPR:
12563 {
12564 tree fndecl = get_callee_fndecl (t);
12565 tree arglist = TREE_OPERAND (t, 1);
12566 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12567 switch (DECL_FUNCTION_CODE (fndecl))
12568 {
12569 CASE_FLT_FN (BUILT_IN_ACOS):
12570 CASE_FLT_FN (BUILT_IN_ACOSH):
12571 CASE_FLT_FN (BUILT_IN_CABS):
12572 CASE_FLT_FN (BUILT_IN_COSH):
12573 CASE_FLT_FN (BUILT_IN_ERFC):
12574 CASE_FLT_FN (BUILT_IN_EXP):
12575 CASE_FLT_FN (BUILT_IN_EXP10):
12576 CASE_FLT_FN (BUILT_IN_EXP2):
12577 CASE_FLT_FN (BUILT_IN_FABS):
12578 CASE_FLT_FN (BUILT_IN_FDIM):
12579 CASE_FLT_FN (BUILT_IN_HYPOT):
12580 CASE_FLT_FN (BUILT_IN_POW10):
12581 CASE_INT_FN (BUILT_IN_FFS):
12582 CASE_INT_FN (BUILT_IN_PARITY):
12583 CASE_INT_FN (BUILT_IN_POPCOUNT):
12584 /* Always true. */
12585 return 1;
12586
12587 CASE_FLT_FN (BUILT_IN_SQRT):
12588 /* sqrt(-0.0) is -0.0. */
12589 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12590 return 1;
12591 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12592 strict_overflow_p);
12593
12594 CASE_FLT_FN (BUILT_IN_ASINH):
12595 CASE_FLT_FN (BUILT_IN_ATAN):
12596 CASE_FLT_FN (BUILT_IN_ATANH):
12597 CASE_FLT_FN (BUILT_IN_CBRT):
12598 CASE_FLT_FN (BUILT_IN_CEIL):
12599 CASE_FLT_FN (BUILT_IN_ERF):
12600 CASE_FLT_FN (BUILT_IN_EXPM1):
12601 CASE_FLT_FN (BUILT_IN_FLOOR):
12602 CASE_FLT_FN (BUILT_IN_FMOD):
12603 CASE_FLT_FN (BUILT_IN_FREXP):
12604 CASE_FLT_FN (BUILT_IN_LCEIL):
12605 CASE_FLT_FN (BUILT_IN_LDEXP):
12606 CASE_FLT_FN (BUILT_IN_LFLOOR):
12607 CASE_FLT_FN (BUILT_IN_LLCEIL):
12608 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12609 CASE_FLT_FN (BUILT_IN_LLRINT):
12610 CASE_FLT_FN (BUILT_IN_LLROUND):
12611 CASE_FLT_FN (BUILT_IN_LRINT):
12612 CASE_FLT_FN (BUILT_IN_LROUND):
12613 CASE_FLT_FN (BUILT_IN_MODF):
12614 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12615 CASE_FLT_FN (BUILT_IN_POW):
12616 CASE_FLT_FN (BUILT_IN_RINT):
12617 CASE_FLT_FN (BUILT_IN_ROUND):
12618 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12619 CASE_FLT_FN (BUILT_IN_SINH):
12620 CASE_FLT_FN (BUILT_IN_TANH):
12621 CASE_FLT_FN (BUILT_IN_TRUNC):
12622 /* True if the 1st argument is nonnegative. */
12623 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12624 strict_overflow_p);
12625
12626 CASE_FLT_FN (BUILT_IN_FMAX):
12627 /* True if the 1st OR 2nd arguments are nonnegative. */
12628 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12629 strict_overflow_p)
12630 || (tree_expr_nonnegative_warnv_p
12631 (TREE_VALUE (TREE_CHAIN (arglist)),
12632 strict_overflow_p)));
12633
12634 CASE_FLT_FN (BUILT_IN_FMIN):
12635 /* True if the 1st AND 2nd arguments are nonnegative. */
12636 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12637 strict_overflow_p)
12638 && (tree_expr_nonnegative_warnv_p
12639 (TREE_VALUE (TREE_CHAIN (arglist)),
12640 strict_overflow_p)));
12641
12642 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12643 /* True if the 2nd argument is nonnegative. */
12644 return (tree_expr_nonnegative_warnv_p
12645 (TREE_VALUE (TREE_CHAIN (arglist)),
12646 strict_overflow_p));
12647
12648 default:
12649 break;
12650 }
12651 }
12652
12653 /* ... fall through ... */
12654
12655 default:
12656 {
12657 tree type = TREE_TYPE (t);
12658 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12659 && truth_value_p (TREE_CODE (t)))
12660	/* Truth values evaluate to 0 or 1, which is nonnegative unless we
12661	   have a signed:1 type (where the values are -1 and 0).  */
12662 return true;
12663 }
12664 }
12665
12666 /* We don't know sign of `t', so be conservative and return false. */
12667 return 0;
12668}
12669
12670/* Return true if `t' is known to be non-negative. Handle warnings
12671 about undefined signed overflow. */
12672
12673int
12674tree_expr_nonnegative_p (tree t)
12675{
12676 int ret;
12677 bool strict_overflow_p;
12678
12679 strict_overflow_p = false;
12680 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12681 if (strict_overflow_p)
12682 fold_overflow_warning (("assuming signed overflow does not occur when "
12683 "determining that expression is always "
12684 "non-negative"),
12685 WARN_STRICT_OVERFLOW_MISC);
12686 return ret;
12687}
12688
12689/* Return true when T is an address and is known to be nonzero.
12690 For floating point we further ensure that T is not denormal.
12691 Similar logic is present in nonzero_address in rtlanal.h.
12692
12693 If the return value is based on the assumption that signed overflow
12694 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12695 change *STRICT_OVERFLOW_P. */
12696
12697bool
12698tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12699{
12700 tree type = TREE_TYPE (t);
12701 bool sub_strict_overflow_p;
12702
12703 /* Doing something useful for floating point would need more work. */
12704 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12705 return false;
12706
12707 switch (TREE_CODE (t))
12708 {
12709 case SSA_NAME:
12710 /* Query VRP to see if it has recorded any information about
12711 the range of this object. */
12712 return ssa_name_nonzero_p (t);
12713
12714 case ABS_EXPR:
12715 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12716 strict_overflow_p);
12717
12718 case INTEGER_CST:
12719 /* We used to test for !integer_zerop here. This does not work correctly
12720 if TREE_CONSTANT_OVERFLOW (t). */
12721 return (TREE_INT_CST_LOW (t) != 0
12722 || TREE_INT_CST_HIGH (t) != 0);
12723
12724 case PLUS_EXPR:
12725 if (TYPE_OVERFLOW_UNDEFINED (type))
12726 {
12727 /* In the presence of negative values it is hard
12728 to say anything definite. */
12729 sub_strict_overflow_p = false;
12730 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12731 &sub_strict_overflow_p)
12732 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12733 &sub_strict_overflow_p))
12734 return false;
12735 /* One of the operands must be positive and the other non-negative. */
12736 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12737 overflows, on a twos-complement machine the sum of two
12738 nonnegative numbers can never be zero. */
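 /* (Each nonnegative n-bit signed value is below 2**(n-1), so the
 sum of two of them is at most 2**n - 2; even after wrapping
 modulo 2**n it can never come out as exactly zero.) */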
12739 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12740 strict_overflow_p)
12741 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12742 strict_overflow_p));
12743 }
12744 break;
12745
12746 case MULT_EXPR:
12747 if (TYPE_OVERFLOW_UNDEFINED (type))
12748 {
12749 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12750 strict_overflow_p)
12751 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12752 strict_overflow_p))
12753 {
12754 *strict_overflow_p = true;
12755 return true;
12756 }
12757 }
12758 break;
12759
12760 case NOP_EXPR:
12761 {
12762 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12763 tree outer_type = TREE_TYPE (t);
12764
12765 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12766 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12767 strict_overflow_p));
12768 }
12769 break;
12770
12771 case ADDR_EXPR:
12772 {
12773 tree base = get_base_address (TREE_OPERAND (t, 0));
12774
12775 if (!base)
12776 return false;
12777
12778 /* Weak declarations may link to NULL. */
12779 if (VAR_OR_FUNCTION_DECL_P (base))
12780 return !DECL_WEAK (base);
12781
12782 /* Constants are never weak. */
12783 if (CONSTANT_CLASS_P (base))
12784 return true;
12785
12786 return false;
12787 }
12788
12789 case COND_EXPR:
12790 sub_strict_overflow_p = false;
12791 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12792 &sub_strict_overflow_p)
12793 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12794 &sub_strict_overflow_p))
12795 {
12796 if (sub_strict_overflow_p)
12797 *strict_overflow_p = true;
12798 return true;
12799 }
12800 break;
12801
12802 case MIN_EXPR:
12803 sub_strict_overflow_p = false;
12804 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12805 &sub_strict_overflow_p)
12806 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12807 &sub_strict_overflow_p))
12808 {
12809 if (sub_strict_overflow_p)
12810 *strict_overflow_p = true;
12811 }
12812 break;
12813
12814 case MAX_EXPR:
12815 sub_strict_overflow_p = false;
12816 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12817 &sub_strict_overflow_p))
12818 {
12819 if (sub_strict_overflow_p)
12820 *strict_overflow_p = true;
12821
12822 /* When both operands are nonzero, then MAX must be too. */
12823 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12824 strict_overflow_p))
12825 return true;
12826
12827 /* MAX where operand 0 is positive is positive. */
12828 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12829 strict_overflow_p);
12830 }
12831 /* MAX where operand 1 is positive is positive. */
12832 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12833 &sub_strict_overflow_p)
12834 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12835 &sub_strict_overflow_p))
12836 {
12837 if (sub_strict_overflow_p)
12838 *strict_overflow_p = true;
12839 return true;
12840 }
12841 break;
12842
12843 case COMPOUND_EXPR:
12844 case MODIFY_EXPR:
12845 case BIND_EXPR:
12846 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12847 strict_overflow_p);
12848
12849 case SAVE_EXPR:
12850 case NON_LVALUE_EXPR:
12851 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12852 strict_overflow_p);
12853
12854 case BIT_IOR_EXPR:
12855 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12856 strict_overflow_p)
12857 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12858 strict_overflow_p));
12859
12860 case CALL_EXPR:
12861 return alloca_call_p (t);
12862
12863 default:
12864 break;
12865 }
12866 return false;
12867}
12868
12869/* Return true when T is an address and is known to be nonzero.
12870 Handle warnings about undefined signed overflow. */
12871
12872bool
12873tree_expr_nonzero_p (tree t)
12874{
12875 bool ret, strict_overflow_p;
12876
12877 strict_overflow_p = false;
12878 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12879 if (strict_overflow_p)
12880 fold_overflow_warning (("assuming signed overflow does not occur when "
12881 "determining that expression is always "
12882 "non-zero"),
12883 WARN_STRICT_OVERFLOW_MISC);
12884 return ret;
12885}
12886
12887/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12888 attempt to fold the expression to a constant without modifying TYPE,
12889 OP0 or OP1.
12890
12891 If the expression could be simplified to a constant, then return
12892 the constant. If the expression would not be simplified to a
12893 constant, then return NULL_TREE. */
12894
12895tree
12896fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12897{
12898 tree tem = fold_binary (code, type, op0, op1);
12899 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12900}
12901
12902/* Given the components of a unary expression CODE, TYPE and OP0,
12903 attempt to fold the expression to a constant without modifying
12904 TYPE or OP0.
12905
12906 If the expression could be simplified to a constant, then return
12907 the constant. If the expression would not be simplified to a
12908 constant, then return NULL_TREE. */
12909
12910tree
12911fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12912{
12913 tree tem = fold_unary (code, type, op0);
12914 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12915}
12916
12917/* If EXP represents referencing an element in a constant string
12918 (either via pointer arithmetic or array indexing), return the
12919 tree representing the value accessed, otherwise return NULL. */
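/* For example, the reference "abc"[1] folds to the integer constant 'b'. */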
12920
12921tree
12922fold_read_from_constant_string (tree exp)
12923{
12924 if ((TREE_CODE (exp) == INDIRECT_REF
12925 || TREE_CODE (exp) == ARRAY_REF)
12926 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12927 {
12928 tree exp1 = TREE_OPERAND (exp, 0);
12929 tree index;
12930 tree string;
12931
12932 if (TREE_CODE (exp) == INDIRECT_REF)
12933 string = string_constant (exp1, &index);
12934 else
12935 {
12936 tree low_bound = array_ref_low_bound (exp);
12937 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12938
12939 /* Optimize the special-case of a zero lower bound.
12940
12941 We convert the low_bound to sizetype to avoid some problems
12942 with constant folding. (E.g. suppose the lower bound is 1,
12943 and its mode is QI. Without the conversion, (ARRAY
12944 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12945 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12946 if (! integer_zerop (low_bound))
12947 index = size_diffop (index, fold_convert (sizetype, low_bound));
12948
12949 string = exp1;
12950 }
12951
12952 if (string
12953 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12954 && TREE_CODE (string) == STRING_CST
12955 && TREE_CODE (index) == INTEGER_CST
12956 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12957 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12958 == MODE_INT)
12959 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12960 return fold_convert (TREE_TYPE (exp),
12961 build_int_cst (NULL_TREE,
12962 (TREE_STRING_POINTER (string)
12963 [TREE_INT_CST_LOW (index)])));
12964 }
12965 return NULL;
12966}
12967
12968/* Return the tree for neg (ARG0) when ARG0 is known to be either
12969 an integer constant or real constant.
12970
12971 TYPE is the type of the result. */
12972
12973static tree
12974fold_negate_const (tree arg0, tree type)
12975{
12976 tree t = NULL_TREE;
12977
12978 switch (TREE_CODE (arg0))
12979 {
12980 case INTEGER_CST:
12981 {
12982 unsigned HOST_WIDE_INT low;
12983 HOST_WIDE_INT high;
12984 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12985 TREE_INT_CST_HIGH (arg0),
12986 &low, &high);
12987 t = build_int_cst_wide (type, low, high);
12988 t = force_fit_type (t, 1,
12989 (overflow | TREE_OVERFLOW (arg0))
12990 && !TYPE_UNSIGNED (type),
12991 TREE_CONSTANT_OVERFLOW (arg0));
12992 break;
12993 }
12994
12995 case REAL_CST:
12996 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12997 break;
12998
12999 default:
13000 gcc_unreachable ();
13001 }
13002
13003 return t;
13004}
13005
13006/* Return the tree for abs (ARG0) when ARG0 is known to be either
13007 an integer constant or real constant.
13008
13009 TYPE is the type of the result. */
13010
13011tree
13012fold_abs_const (tree arg0, tree type)
13013{
13014 tree t = NULL_TREE;
13015
13016 switch (TREE_CODE (arg0))
13017 {
13018 case INTEGER_CST:
13019 /* If the value is unsigned, then the absolute value is
13020 the same as the ordinary value. */
13021 if (TYPE_UNSIGNED (type))
13022 t = arg0;
13023 /* Similarly, if the value is non-negative. */
13024 else if (INT_CST_LT (integer_minus_one_node, arg0))
13025 t = arg0;
13026 /* If the value is negative, then the absolute value is
13027 its negation. */
13028 else
13029 {
13030 unsigned HOST_WIDE_INT low;
13031 HOST_WIDE_INT high;
13032 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13033 TREE_INT_CST_HIGH (arg0),
13034 &low, &high);
13035 t = build_int_cst_wide (type, low, high);
13036 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13037 TREE_CONSTANT_OVERFLOW (arg0));
13038 }
13039 break;
13040
13041 case REAL_CST:
13042 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13043 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13044 else
13045 t = arg0;
13046 break;
13047
13048 default:
13049 gcc_unreachable ();
13050 }
13051
13052 return t;
13053}
13054
13055/* Return the tree for not (ARG0) when ARG0 is known to be an integer
13056 constant. TYPE is the type of the result. */
13057
13058static tree
13059fold_not_const (tree arg0, tree type)
13060{
13061 tree t = NULL_TREE;
13062
13063 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13064
13065 t = build_int_cst_wide (type,
13066 ~ TREE_INT_CST_LOW (arg0),
13067 ~ TREE_INT_CST_HIGH (arg0));
13068 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13069 TREE_CONSTANT_OVERFLOW (arg0));
13070
13071 return t;
13072}
13073
13074/* Given CODE, a relational operator, the target type, TYPE and two
13075 constant operands OP0 and OP1, return the result of the
13076 relational operation. If the result is not a compile time
13077 constant, then return NULL_TREE. */
13078
13079static tree
13080fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13081{
13082 int result, invert;
13083
13084 /* From here on, the only cases we handle are when the result is
13085 known to be a constant. */
13086
13087 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13088 {
13089 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13090 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13091
13092 /* Handle the cases where either operand is a NaN. */
13093 if (real_isnan (c0) || real_isnan (c1))
13094 {
13095 switch (code)
13096 {
13097 case EQ_EXPR:
13098 case ORDERED_EXPR:
13099 result = 0;
13100 break;
13101
13102 case NE_EXPR:
13103 case UNORDERED_EXPR:
13104 case UNLT_EXPR:
13105 case UNLE_EXPR:
13106 case UNGT_EXPR:
13107 case UNGE_EXPR:
13108 case UNEQ_EXPR:
13109 result = 1;
13110 break;
13111
13112 case LT_EXPR:
13113 case LE_EXPR:
13114 case GT_EXPR:
13115 case GE_EXPR:
13116 case LTGT_EXPR:
13117 if (flag_trapping_math)
13118 return NULL_TREE;
13119 result = 0;
13120 break;
13121
13122 default:
13123 gcc_unreachable ();
13124 }
13125
13126 return constant_boolean_node (result, type);
13127 }
13128
13129 return constant_boolean_node (real_compare (code, c0, c1), type);
13130 }
13131
13132 /* Handle equality/inequality of complex constants. */
13133 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13134 {
13135 tree rcond = fold_relational_const (code, type,
13136 TREE_REALPART (op0),
13137 TREE_REALPART (op1));
13138 tree icond = fold_relational_const (code, type,
13139 TREE_IMAGPART (op0),
13140 TREE_IMAGPART (op1));
13141 if (code == EQ_EXPR)
13142 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13143 else if (code == NE_EXPR)
13144 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13145 else
13146 return NULL_TREE;
13147 }
13148
13149 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13150
13151 To compute GT, swap the arguments and do LT.
13152 To compute GE, do LT and invert the result.
13153 To compute LE, swap the arguments, do LT and invert the result.
13154 To compute NE, do EQ and invert the result.
13155
13156 Therefore, the code below must handle only EQ and LT. */
13157
13158 if (code == LE_EXPR || code == GT_EXPR)
13159 {
13160 tree tem = op0;
13161 op0 = op1;
13162 op1 = tem;
13163 code = swap_tree_comparison (code);
13164 }
13165
13166 /* Note that it is safe to invert for real values here because we
13167 have already handled the one case where it matters. */
13168
13169 invert = 0;
13170 if (code == NE_EXPR || code == GE_EXPR)
13171 {
13172 invert = 1;
13173 code = invert_tree_comparison (code, false);
13174 }
13175
13176 /* Compute a result for LT or EQ if args permit;
13177 otherwise return NULL_TREE. */
13178 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13179 {
13180 if (code == EQ_EXPR)
13181 result = tree_int_cst_equal (op0, op1);
13182 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13183 result = INT_CST_LT_UNSIGNED (op0, op1);
13184 else
13185 result = INT_CST_LT (op0, op1);
13186 }
13187 else
13188 return NULL_TREE;
13189
13190 if (invert)
13191 result ^= 1;
13192 return constant_boolean_node (result, type);
13193}
13194
13195/* Build an expression for a cleanup point containing EXPR, with type TYPE.
13196 Don't build a cleanup point expression for EXPR which doesn't have side
13197 effects. */
13198
13199tree
13200fold_build_cleanup_point_expr (tree type, tree expr)
13201{
13202 /* If the expression does not have side effects then we don't have to wrap
13203 it with a cleanup point expression. */
13204 if (!TREE_SIDE_EFFECTS (expr))
13205 return expr;
13206
13207 /* If the expression is a return, check whether the expression inside the
13208 return, or the right hand side of the modify expression inside the return,
13209 has side effects. If either of them has no side effects, we don't need to
13210 wrap the expression in a cleanup point expression. Note we don't check the
13211 left hand side of the modify because it should always be a return decl. */
13212 if (TREE_CODE (expr) == RETURN_EXPR)
13213 {
13214 tree op = TREE_OPERAND (expr, 0);
13215 if (!op || !TREE_SIDE_EFFECTS (op))
13216 return expr;
13217 op = TREE_OPERAND (op, 1);
13218 if (!TREE_SIDE_EFFECTS (op))
13219 return expr;
13220 }
13221
13222 return build1 (CLEANUP_POINT_EXPR, type, expr);
13223}
13224
13225/* Build an expression for the address of T. Folds away INDIRECT_REF to
13226 avoid confusing the gimplify process. */
13227
13228tree
13229build_fold_addr_expr_with_type (tree t, tree ptrtype)
13230{
13231 /* The size of the object is not relevant when talking about its address. */
13232 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13233 t = TREE_OPERAND (t, 0);
13234
13235 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13236 if (TREE_CODE (t) == INDIRECT_REF
13237 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13238 {
13239 t = TREE_OPERAND (t, 0);
13240 if (TREE_TYPE (t) != ptrtype)
13241 t = build1 (NOP_EXPR, ptrtype, t);
13242 }
13243 else
13244 {
13245 tree base = t;
13246
13247 while (handled_component_p (base))
13248 base = TREE_OPERAND (base, 0);
13249 if (DECL_P (base))
13250 TREE_ADDRESSABLE (base) = 1;
13251
13252 t = build1 (ADDR_EXPR, ptrtype, t);
13253 }
13254
13255 return t;
13256}
13257
13258tree
13259build_fold_addr_expr (tree t)
13260{
13261 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13262}
13263
13264/* Given a pointer value OP0 and a type TYPE, return a simplified version
13265 of an indirection through OP0, or NULL_TREE if no simplification is
13266 possible. */
13267
13268tree
13269fold_indirect_ref_1 (tree type, tree op0)
13270{
13271 tree sub = op0;
13272 tree subtype;
13273
13274 STRIP_NOPS (sub);
13275 subtype = TREE_TYPE (sub);
13276 if (!POINTER_TYPE_P (subtype))
13277 return NULL_TREE;
13278
13279 if (TREE_CODE (sub) == ADDR_EXPR)
13280 {
13281 tree op = TREE_OPERAND (sub, 0);
13282 tree optype = TREE_TYPE (op);
13283 /* *&CONST_DECL -> to the value of the const decl. */
13284 if (TREE_CODE (op) == CONST_DECL)
13285 return DECL_INITIAL (op);
13286 /* *&p => p; make sure to handle *&"str"[cst] here. */
13287 if (type == optype)
13288 {
13289 tree fop = fold_read_from_constant_string (op);
13290 if (fop)
13291 return fop;
13292 else
13293 return op;
13294 }
13295 /* *(foo *)&fooarray => fooarray[0] */
13296 else if (TREE_CODE (optype) == ARRAY_TYPE
13297 && type == TREE_TYPE (optype))
13298 {
13299 tree type_domain = TYPE_DOMAIN (optype);
13300 tree min_val = size_zero_node;
13301 if (type_domain && TYPE_MIN_VALUE (type_domain))
13302 min_val = TYPE_MIN_VALUE (type_domain);
13303 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13304 }
13305 /* *(foo *)&complexfoo => __real__ complexfoo */
13306 else if (TREE_CODE (optype) == COMPLEX_TYPE
13307 && type == TREE_TYPE (optype))
13308 return fold_build1 (REALPART_EXPR, type, op);
13309 }
13310
13311 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13312 if (TREE_CODE (sub) == PLUS_EXPR
13313 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13314 {
13315 tree op00 = TREE_OPERAND (sub, 0);
13316 tree op01 = TREE_OPERAND (sub, 1);
13317 tree op00type;
13318
13319 STRIP_NOPS (op00);
13320 op00type = TREE_TYPE (op00);
13321 if (TREE_CODE (op00) == ADDR_EXPR
13322 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13323 && type == TREE_TYPE (TREE_TYPE (op00type)))
13324 {
13325 tree size = TYPE_SIZE_UNIT (type);
13326 if (tree_int_cst_equal (size, op01))
13327 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13328 }
13329 }
13330
13331 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13332 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13333 && type == TREE_TYPE (TREE_TYPE (subtype)))
13334 {
13335 tree type_domain;
13336 tree min_val = size_zero_node;
13337 sub = build_fold_indirect_ref (sub);
13338 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13339 if (type_domain && TYPE_MIN_VALUE (type_domain))
13340 min_val = TYPE_MIN_VALUE (type_domain);
13341 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13342 }
13343
13344 return NULL_TREE;
13345}
13346
13347/* Builds an expression for an indirection through T, simplifying some
13348 cases. */
13349
13350tree
13351build_fold_indirect_ref (tree t)
13352{
13353 tree type = TREE_TYPE (TREE_TYPE (t));
13354 tree sub = fold_indirect_ref_1 (type, t);
13355
13356 if (sub)
13357 return sub;
13358 else
13359 return build1 (INDIRECT_REF, type, t);
13360}
13361
13362/* Given an INDIRECT_REF T, return either T or a simplified version. */
13363
13364tree
13365fold_indirect_ref (tree t)
13366{
13367 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13368
13369 if (sub)
13370 return sub;
13371 else
13372 return t;
13373}
13374
13375/* Strip non-trapping, non-side-effecting tree nodes from an expression
13376 whose result is ignored. The type of the returned tree need not be
13377 the same as the original expression. */
13378
13379tree
13380fold_ignored_result (tree t)
13381{
13382 if (!TREE_SIDE_EFFECTS (t))
13383 return integer_zero_node;
13384
13385 for (;;)
13386 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13387 {
13388 case tcc_unary:
13389 t = TREE_OPERAND (t, 0);
13390 break;
13391
13392 case tcc_binary:
13393 case tcc_comparison:
13394 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13395 t = TREE_OPERAND (t, 0);
13396 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13397 t = TREE_OPERAND (t, 1);
13398 else
13399 return t;
13400 break;
13401
13402 case tcc_expression:
13403 switch (TREE_CODE (t))
13404 {
13405 case COMPOUND_EXPR:
13406 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13407 return t;
13408 t = TREE_OPERAND (t, 0);
13409 break;
13410
13411 case COND_EXPR:
13412 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13413 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13414 return t;
13415 t = TREE_OPERAND (t, 0);
13416 break;
13417
13418 default:
13419 return t;
13420 }
13421 break;
13422
13423 default:
13424 return t;
13425 }
13426}
13427
13428/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13429 This can only be applied to objects of a sizetype. */
13430
13431tree
13432round_up (tree value, int divisor)
13433{
13434 tree div = NULL_TREE;
13435
13436 gcc_assert (divisor > 0);
13437 if (divisor == 1)
13438 return value;
13439
13440 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13441 have to do anything. Only do this when we are not given a const,
13442 because in that case, this check is more expensive than just
13443 doing it. */
13444 if (TREE_CODE (value) != INTEGER_CST)
13445 {
13446 div = build_int_cst (TREE_TYPE (value), divisor);
13447
13448 if (multiple_of_p (TREE_TYPE (value), value, div))
13449 return value;
13450 }
13451
13452 /* If divisor is a power of two, simplify this to bit manipulation. */
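 /* For example, with DIVISOR == 8 the computation below is
 (VALUE + 7) & -8, so a VALUE of 13 becomes 20 & -8 == 16. */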
13453 if (divisor == (divisor & -divisor))
13454 {
13455 tree t;
13456
13457 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13458 value = size_binop (PLUS_EXPR, value, t);
13459 t = build_int_cst (TREE_TYPE (value), -divisor);
13460 value = size_binop (BIT_AND_EXPR, value, t);
13461 }
13462 else
13463 {
13464 if (!div)
13465 div = build_int_cst (TREE_TYPE (value), divisor);
13466 value = size_binop (CEIL_DIV_EXPR, value, div);
13467 value = size_binop (MULT_EXPR, value, div);
13468 }
13469
13470 return value;
13471}
13472
13473/* Likewise, but round down. */
13474
13475tree
13476round_down (tree value, int divisor)
13477{
13478 tree div = NULL_TREE;
13479
13480 gcc_assert (divisor > 0);
13481 if (divisor == 1)
13482 return value;
13483
13484 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13485 have to do anything. Only do this when we are not given a const,
13486 because in that case, this check is more expensive than just
13487 doing it. */
13488 if (TREE_CODE (value) != INTEGER_CST)
13489 {
13490 div = build_int_cst (TREE_TYPE (value), divisor);
13491
13492 if (multiple_of_p (TREE_TYPE (value), value, div))
13493 return value;
13494 }
13495
13496 /* If divisor is a power of two, simplify this to bit manipulation. */
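 /* For example, with DIVISOR == 8 this computes VALUE & -8,
 so a VALUE of 13 becomes 8. */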
13497 if (divisor == (divisor & -divisor))
13498 {
13499 tree t;
13500
13501 t = build_int_cst (TREE_TYPE (value), -divisor);
13502 value = size_binop (BIT_AND_EXPR, value, t);
13503 }
13504 else
13505 {
13506 if (!div)
13507 div = build_int_cst (TREE_TYPE (value), divisor);
13508 value = size_binop (FLOOR_DIV_EXPR, value, div);
13509 value = size_binop (MULT_EXPR, value, div);
13510 }
13511
13512 return value;
13513}
13514
13515/* Returns the pointer to the base of the object addressed by EXP and
13516 extracts the information about the offset of the access, storing it
13517 to PBITPOS and POFFSET. */
13518
13519static tree
13520split_address_to_core_and_offset (tree exp,
13521 HOST_WIDE_INT *pbitpos, tree *poffset)
13522{
13523 tree core;
13524 enum machine_mode mode;
13525 int unsignedp, volatilep;
13526 HOST_WIDE_INT bitsize;
13527
13528 if (TREE_CODE (exp) == ADDR_EXPR)
13529 {
13530 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13531 poffset, &mode, &unsignedp, &volatilep,
13532 false);
13533 core = build_fold_addr_expr (core);
13534 }
13535 else
13536 {
13537 core = exp;
13538 *pbitpos = 0;
13539 *poffset = NULL_TREE;
13540 }
13541
13542 return core;
13543}
13544
13545/* Returns true if addresses of E1 and E2 differ by a constant, false
13546 otherwise. If they do, E1 - E2 is stored in *DIFF. */
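/* For example, given &a[3] and &a[1] this returns true and sets *DIFF
 to twice the size in bytes of an element of a. */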
13547
13548bool
13549ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13550{
13551 tree core1, core2;
13552 HOST_WIDE_INT bitpos1, bitpos2;
13553 tree toffset1, toffset2, tdiff, type;
13554
13555 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13556 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13557
13558 if (bitpos1 % BITS_PER_UNIT != 0
13559 || bitpos2 % BITS_PER_UNIT != 0
13560 || !operand_equal_p (core1, core2, 0))
13561 return false;
13562
13563 if (toffset1 && toffset2)
13564 {
13565 type = TREE_TYPE (toffset1);
13566 if (type != TREE_TYPE (toffset2))
13567 toffset2 = fold_convert (type, toffset2);
13568
13569 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13570 if (!cst_and_fits_in_hwi (tdiff))
13571 return false;
13572
13573 *diff = int_cst_value (tdiff);
13574 }
13575 else if (toffset1 || toffset2)
13576 {
13577 /* If only one of the offsets is non-constant, the difference cannot
13578 be a constant. */
13579 return false;
13580 }
13581 else
13582 *diff = 0;
13583
13584 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13585 return true;
13586}
13587
13588/* Simplify the floating point expression EXP when the sign of the
13589 result is not significant. Return NULL_TREE if no simplification
13590 is possible. */
13591
13592tree
13593fold_strip_sign_ops (tree exp)
13594{
13595 tree arg0, arg1;
13596
13597 switch (TREE_CODE (exp))
13598 {
13599 case ABS_EXPR:
13600 case NEGATE_EXPR:
13601 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13602 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13603
13604 case MULT_EXPR:
13605 case RDIV_EXPR:
13606 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13607 return NULL_TREE;
13608 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13609 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13610 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13611 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13612 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13613 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13614 break;
13615
13616 default:
13617 break;
13618 }
13619 return NULL_TREE;
13620}
13621
9519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9520 {
9521 t1 = build_int_cst (type, -1);
9522 t1 = force_fit_type (t1, 0, false, false);
9523 return omit_one_operand (type, t1, arg0);
9524 }
9525
9526 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9527 with a constant, and the two constants have no bits in common,
9528 we should treat this as a BIT_IOR_EXPR since this may produce more
9529 simplifications. */
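 /* For example, (X & 0x0f) ^ (Y & 0xf0): the two masked values can
 have no bits in common, so the XOR is equivalent to an IOR. */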
9530 if (TREE_CODE (arg0) == BIT_AND_EXPR
9531 && TREE_CODE (arg1) == BIT_AND_EXPR
9532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9533 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9534 && integer_zerop (const_binop (BIT_AND_EXPR,
9535 TREE_OPERAND (arg0, 1),
9536 TREE_OPERAND (arg1, 1), 0)))
9537 {
9538 code = BIT_IOR_EXPR;
9539 goto bit_ior;
9540 }
9541
9542 /* (X | Y) ^ X -> Y & ~X */
9543 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9545 {
9546 tree t2 = TREE_OPERAND (arg0, 1);
9547 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9548 arg1);
9549 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9550 fold_convert (type, t1));
9551 return t1;
9552 }
9553
9554 /* (Y | X) ^ X -> Y & ~X */
9555 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9556 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9557 {
9558 tree t2 = TREE_OPERAND (arg0, 0);
9559 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9560 arg1);
9561 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9562 fold_convert (type, t1));
9563 return t1;
9564 }
9565
9566 /* X ^ (X | Y) -> Y & ~X */
9567 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9568 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9569 {
9570 tree t2 = TREE_OPERAND (arg1, 1);
9571 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9572 arg0);
9573 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9574 fold_convert (type, t1));
9575 return t1;
9576 }
9577
9578 /* X ^ (Y | X) -> Y & ~X */
9579 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9580 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9581 {
9582 tree t2 = TREE_OPERAND (arg1, 0);
9583 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9584 arg0);
9585 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9586 fold_convert (type, t1));
9587 return t1;
9588 }
9589
9590 /* Convert ~X ^ ~Y to X ^ Y. */
9591 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9592 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9593 return fold_build2 (code, type,
9594 fold_convert (type, TREE_OPERAND (arg0, 0)),
9595 fold_convert (type, TREE_OPERAND (arg1, 0)));
9596
9597 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9598 if (TREE_CODE (arg0) == BIT_AND_EXPR
9599 && integer_onep (TREE_OPERAND (arg0, 1))
9600 && integer_onep (arg1))
9601 return fold_build2 (EQ_EXPR, type, arg0,
9602 build_int_cst (TREE_TYPE (arg0), 0));
9603
9604 /* Fold (X & Y) ^ Y as ~X & Y. */
9605 if (TREE_CODE (arg0) == BIT_AND_EXPR
9606 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9607 {
9608 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9609 return fold_build2 (BIT_AND_EXPR, type,
9610 fold_build1 (BIT_NOT_EXPR, type, tem),
9611 fold_convert (type, arg1));
9612 }
9613 /* Fold (X & Y) ^ X as ~Y & X. */
9614 if (TREE_CODE (arg0) == BIT_AND_EXPR
9615 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9616 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9617 {
9618 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9619 return fold_build2 (BIT_AND_EXPR, type,
9620 fold_build1 (BIT_NOT_EXPR, type, tem),
9621 fold_convert (type, arg1));
9622 }
9623 /* Fold X ^ (X & Y) as X & ~Y. */
9624 if (TREE_CODE (arg1) == BIT_AND_EXPR
9625 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9626 {
9627 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9628 return fold_build2 (BIT_AND_EXPR, type,
9629 fold_convert (type, arg0),
9630 fold_build1 (BIT_NOT_EXPR, type, tem));
9631 }
9632 /* Fold X ^ (Y & X) as ~Y & X. */
9633 if (TREE_CODE (arg1) == BIT_AND_EXPR
9634 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9635 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9636 {
9637 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9638 return fold_build2 (BIT_AND_EXPR, type,
9639 fold_build1 (BIT_NOT_EXPR, type, tem),
9640 fold_convert (type, arg0));
9641 }
9642
9643 /* See if this can be simplified into a rotate first. If that
9644 is unsuccessful continue in the association code. */
9645 goto bit_rotate;
9646
9647 case BIT_AND_EXPR:
9648 if (integer_all_onesp (arg1))
9649 return non_lvalue (fold_convert (type, arg0));
9650 if (integer_zerop (arg1))
9651 return omit_one_operand (type, arg1, arg0);
9652 if (operand_equal_p (arg0, arg1, 0))
9653 return non_lvalue (fold_convert (type, arg0));
9654
9655 /* ~X & X is always zero. */
9656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9657 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9658 return omit_one_operand (type, integer_zero_node, arg1);
9659
9660 /* X & ~X is always zero. */
9661 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9662 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9663 return omit_one_operand (type, integer_zero_node, arg0);
9664
9665 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
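 /* For example, (X | 3) & 6 becomes (X & 6) | (3 & 6), i.e.
 (X & 6) | 2. */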
9666 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9667 && TREE_CODE (arg1) == INTEGER_CST
9668 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9669 return fold_build2 (BIT_IOR_EXPR, type,
9670 fold_build2 (BIT_AND_EXPR, type,
9671 TREE_OPERAND (arg0, 0), arg1),
9672 fold_build2 (BIT_AND_EXPR, type,
9673 TREE_OPERAND (arg0, 1), arg1));
9674
9675 /* (X | Y) & Y is (X, Y). */
9676 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9677 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9678 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9679 /* (X | Y) & X is (Y, X). */
9680 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9683 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9684 /* X & (X | Y) is (Y, X). */
9685 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9688 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9689 /* X & (Y | X) is (Y, X). */
9690 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9692 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9693 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9694
9695 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9696 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9697 && integer_onep (TREE_OPERAND (arg0, 1))
9698 && integer_onep (arg1))
9699 {
9700 tem = TREE_OPERAND (arg0, 0);
9701 return fold_build2 (EQ_EXPR, type,
9702 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9703 build_int_cst (TREE_TYPE (tem), 1)),
9704 build_int_cst (TREE_TYPE (tem), 0));
9705 }
9706 /* Fold ~X & 1 as (X & 1) == 0. */
9707 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9708 && integer_onep (arg1))
9709 {
9710 tem = TREE_OPERAND (arg0, 0);
9711 return fold_build2 (EQ_EXPR, type,
9712 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9713 build_int_cst (TREE_TYPE (tem), 1)),
9714 build_int_cst (TREE_TYPE (tem), 0));
9715 }
9716
9717 /* Fold (X ^ Y) & Y as ~X & Y. */
9718 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9719 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9720 {
9721 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9722 return fold_build2 (BIT_AND_EXPR, type,
9723 fold_build1 (BIT_NOT_EXPR, type, tem),
9724 fold_convert (type, arg1));
9725 }
9726 /* Fold (X ^ Y) & X as ~Y & X. */
9727 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9729 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9730 {
9731 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9732 return fold_build2 (BIT_AND_EXPR, type,
9733 fold_build1 (BIT_NOT_EXPR, type, tem),
9734 fold_convert (type, arg1));
9735 }
9736 /* Fold X & (X ^ Y) as X & ~Y. */
9737 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9738 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9739 {
9740 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9741 return fold_build2 (BIT_AND_EXPR, type,
9742 fold_convert (type, arg0),
9743 fold_build1 (BIT_NOT_EXPR, type, tem));
9744 }
9745 /* Fold X & (Y ^ X) as ~Y & X. */
9746 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9748 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9749 {
9750 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9751 return fold_build2 (BIT_AND_EXPR, type,
9752 fold_build1 (BIT_NOT_EXPR, type, tem),
9753 fold_convert (type, arg0));
9754 }
9755
9756 t1 = distribute_bit_expr (code, type, arg0, arg1);
9757 if (t1 != NULL_TREE)
9758 return t1;
9759 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9760 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9761 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9762 {
9763 unsigned int prec
9764 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9765
9766 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9767 && (~TREE_INT_CST_LOW (arg1)
9768 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9769 return fold_convert (type, TREE_OPERAND (arg0, 0));
9770 }
9771
9772 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9773
9774 This results in more efficient code for machines without a NOR
9775 instruction. Combine will canonicalize to the first form
9776 which will allow use of NOR instructions provided by the
9777 backend if they exist. */
9778 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9779 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9780 {
9781 return fold_build1 (BIT_NOT_EXPR, type,
9782 build2 (BIT_IOR_EXPR, type,
9783 TREE_OPERAND (arg0, 0),
9784 TREE_OPERAND (arg1, 0)));
9785 }
9786
9787 goto associate;
9788
9789 case RDIV_EXPR:
9790 /* Don't touch a floating-point divide by zero unless the mode
9791 of the constant can represent infinity. */
9792 if (TREE_CODE (arg1) == REAL_CST
9793 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9794 && real_zerop (arg1))
9795 return NULL_TREE;
9796
9797 /* Optimize A / A to 1.0 if we don't care about
9798 NaNs or Infinities. Skip the transformation
9799 for non-real operands. */
9800 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9801 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9802 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9803 && operand_equal_p (arg0, arg1, 0))
9804 {
9805 tree r = build_real (TREE_TYPE (arg0), dconst1);
9806
9807 return omit_two_operands (type, r, arg0, arg1);
9808 }
9809
9810 /* The complex version of the above A / A optimization. */
9811 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9812 && operand_equal_p (arg0, arg1, 0))
9813 {
9814 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9815 if (! HONOR_NANS (TYPE_MODE (elem_type))
9816 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9817 {
9818 tree r = build_real (elem_type, dconst1);
9819 /* omit_two_operands will call fold_convert for us. */
9820 return omit_two_operands (type, r, arg0, arg1);
9821 }
9822 }
9823
9824 /* (-A) / (-B) -> A / B */
9825 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9826 return fold_build2 (RDIV_EXPR, type,
9827 TREE_OPERAND (arg0, 0),
9828 negate_expr (arg1));
9829 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9830 return fold_build2 (RDIV_EXPR, type,
9831 negate_expr (arg0),
9832 TREE_OPERAND (arg1, 0));
9833
9834 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9835 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9836 && real_onep (arg1))
9837 return non_lvalue (fold_convert (type, arg0));
9838
9839 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9840 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9841 && real_minus_onep (arg1))
9842 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9843
9844 /* If ARG1 is a constant, we can convert this to a multiply by the
9845 reciprocal. This does not have the same rounding properties,
9846 so only do this if -funsafe-math-optimizations. We can actually
9847 always safely do it if ARG1 is a power of two, but it's hard to
9848 tell if it is or not in a portable manner. */
9849 if (TREE_CODE (arg1) == REAL_CST)
9850 {
9851 if (flag_unsafe_math_optimizations
9852 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9853 arg1, 0)))
9854 return fold_build2 (MULT_EXPR, type, arg0, tem);
9855 /* Find the reciprocal if optimizing and the result is exact. */
9856 if (optimize)
9857 {
9858 REAL_VALUE_TYPE r;
9859 r = TREE_REAL_CST (arg1);
9860 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9861 {
9862 tem = build_real (type, r);
9863 return fold_build2 (MULT_EXPR, type,
9864 fold_convert (type, arg0), tem);
9865 }
9866 }
9867 }
9868 /* Convert A/B/C to A/(B*C). */
9869 if (flag_unsafe_math_optimizations
9870 && TREE_CODE (arg0) == RDIV_EXPR)
9871 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9872 fold_build2 (MULT_EXPR, type,
9873 TREE_OPERAND (arg0, 1), arg1));
9874
9875 /* Convert A/(B/C) to (A/B)*C. */
9876 if (flag_unsafe_math_optimizations
9877 && TREE_CODE (arg1) == RDIV_EXPR)
9878 return fold_build2 (MULT_EXPR, type,
9879 fold_build2 (RDIV_EXPR, type, arg0,
9880 TREE_OPERAND (arg1, 0)),
9881 TREE_OPERAND (arg1, 1));
9882
9883 /* Convert C1/(X*C2) into (C1/C2)/X. */
9884 if (flag_unsafe_math_optimizations
9885 && TREE_CODE (arg1) == MULT_EXPR
9886 && TREE_CODE (arg0) == REAL_CST
9887 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9888 {
9889 tree tem = const_binop (RDIV_EXPR, arg0,
9890 TREE_OPERAND (arg1, 1), 0);
9891 if (tem)
9892 return fold_build2 (RDIV_EXPR, type, tem,
9893 TREE_OPERAND (arg1, 0));
9894 }
9895
9896 if (flag_unsafe_math_optimizations)
9897 {
9898 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9899 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9900
9901 /* Optimize sin(x)/cos(x) as tan(x). */
9902 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9903 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9904 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9905 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9906 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9907 {
9908 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9909
9910 if (tanfn != NULL_TREE)
9911 return build_function_call_expr (tanfn,
9912 TREE_OPERAND (arg0, 1));
9913 }
9914
9915 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9916 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9917 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9918 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9919 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9920 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9921 {
9922 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9923
9924 if (tanfn != NULL_TREE)
9925 {
9926 tree tmp = TREE_OPERAND (arg0, 1);
9927 tmp = build_function_call_expr (tanfn, tmp);
9928 return fold_build2 (RDIV_EXPR, type,
9929 build_real (type, dconst1), tmp);
9930 }
9931 }
9932
9933 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9934 NaNs or Infinities. */
9935 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9936 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9937 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9938 {
9939 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9940 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9941
9942 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9943 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9944 && operand_equal_p (arg00, arg01, 0))
9945 {
9946 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9947
9948 if (cosfn != NULL_TREE)
9949 return build_function_call_expr (cosfn,
9950 TREE_OPERAND (arg0, 1));
9951 }
9952 }
9953
9954 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9955 NaNs or Infinities. */
9956 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9957 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9958 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9959 {
9960 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9961 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9962
9963 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9964 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9965 && operand_equal_p (arg00, arg01, 0))
9966 {
9967 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9968
9969 if (cosfn != NULL_TREE)
9970 {
9971 tree tmp = TREE_OPERAND (arg0, 1);
9972 tmp = build_function_call_expr (cosfn, tmp);
9973 return fold_build2 (RDIV_EXPR, type,
9974 build_real (type, dconst1),
9975 tmp);
9976 }
9977 }
9978 }
9979
9980 /* Optimize pow(x,c)/x as pow(x,c-1). */
9981 if (fcode0 == BUILT_IN_POW
9982 || fcode0 == BUILT_IN_POWF
9983 || fcode0 == BUILT_IN_POWL)
9984 {
9985 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9986 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9987 if (TREE_CODE (arg01) == REAL_CST
9988 && ! TREE_CONSTANT_OVERFLOW (arg01)
9989 && operand_equal_p (arg1, arg00, 0))
9990 {
9991 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9992 REAL_VALUE_TYPE c;
9993 tree arg, arglist;
9994
9995 c = TREE_REAL_CST (arg01);
9996 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9997 arg = build_real (type, c);
9998 arglist = build_tree_list (NULL_TREE, arg);
9999 arglist = tree_cons (NULL_TREE, arg1, arglist);
10000 return build_function_call_expr (powfn, arglist);
10001 }
10002 }
10003
10004 /* Optimize x/expN(y) into x*expN(-y). */
10005 if (BUILTIN_EXPONENT_P (fcode1))
10006 {
10007 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10008 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10009 tree arglist = build_tree_list (NULL_TREE,
10010 fold_convert (type, arg));
10011 arg1 = build_function_call_expr (expfn, arglist);
10012 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10013 }
10014
10015 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10016 if (fcode1 == BUILT_IN_POW
10017 || fcode1 == BUILT_IN_POWF
10018 || fcode1 == BUILT_IN_POWL)
10019 {
10020 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10021 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10022 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10023 tree neg11 = fold_convert (type, negate_expr (arg11));
10024 tree arglist = tree_cons (NULL_TREE, arg10,
10025 build_tree_list (NULL_TREE, neg11));
10026 arg1 = build_function_call_expr (powfn, arglist);
10027 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10028 }
10029 }
10030 return NULL_TREE;
10031
10032 case TRUNC_DIV_EXPR:
10033 case FLOOR_DIV_EXPR:
10034 /* Simplify A / (B << N) where A and B are positive and B is
10035 a power of 2, to A >> (N + log2(B)). */
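 /* For example, A / (4 << N) becomes A >> (N + 2), since
 log2(4) == 2. */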
10036 strict_overflow_p = false;
10037 if (TREE_CODE (arg1) == LSHIFT_EXPR
10038 && (TYPE_UNSIGNED (type)
10039 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10040 {
10041 tree sval = TREE_OPERAND (arg1, 0);
10042 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10043 {
10044 tree sh_cnt = TREE_OPERAND (arg1, 1);
10045 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10046
10047 if (strict_overflow_p)
10048 fold_overflow_warning (("assuming signed overflow does not "
10049 "occur when simplifying A / (B << N)"),
10050 WARN_STRICT_OVERFLOW_MISC);
10051
10052 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10053 sh_cnt, build_int_cst (NULL_TREE, pow2));
10054 return fold_build2 (RSHIFT_EXPR, type,
10055 fold_convert (type, arg0), sh_cnt);
10056 }
10057 }
10058 /* Fall thru */
10059
10060 case ROUND_DIV_EXPR:
10061 case CEIL_DIV_EXPR:
10062 case EXACT_DIV_EXPR:
10063 if (integer_onep (arg1))
10064 return non_lvalue (fold_convert (type, arg0));
10065 if (integer_zerop (arg1))
10066 return NULL_TREE;
10067 /* X / -1 is -X. */
10068 if (!TYPE_UNSIGNED (type)
10069 && TREE_CODE (arg1) == INTEGER_CST
10070 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10071 && TREE_INT_CST_HIGH (arg1) == -1)
10072 return fold_convert (type, negate_expr (arg0));
10073
10074 /* Convert -A / -B to A / B when the type is signed and overflow is
10075 undefined. */
10076 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10077 && TREE_CODE (arg0) == NEGATE_EXPR
10078 && negate_expr_p (arg1))
10079 {
10080 if (INTEGRAL_TYPE_P (type))
10081 fold_overflow_warning (("assuming signed overflow does not occur "
10082 "when distributing negation across "
10083 "division"),
10084 WARN_STRICT_OVERFLOW_MISC);
10085 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10086 negate_expr (arg1));
10087 }
10088 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10089 && TREE_CODE (arg1) == NEGATE_EXPR
10090 && negate_expr_p (arg0))
10091 {
10092 if (INTEGRAL_TYPE_P (type))
10093 fold_overflow_warning (("assuming signed overflow does not occur "
10094 "when distributing negation across "
10095 "division"),
10096 WARN_STRICT_OVERFLOW_MISC);
10097 return fold_build2 (code, type, negate_expr (arg0),
10098 TREE_OPERAND (arg1, 0));
10099 }
10100
10101 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10102 operation, EXACT_DIV_EXPR.
10103
10104 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10105 At one time others generated faster code; it's not clear whether they do
10106 after the last round of changes to the DIV code in expmed.c. */
10107 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10108 && multiple_of_p (type, arg0, arg1))
10109 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10110
10111 strict_overflow_p = false;
10112 if (TREE_CODE (arg1) == INTEGER_CST
10113 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10114 &strict_overflow_p)))
10115 {
10116 if (strict_overflow_p)
10117 fold_overflow_warning (("assuming signed overflow does not occur "
10118 "when simplifying division"),
10119 WARN_STRICT_OVERFLOW_MISC);
10120 return fold_convert (type, tem);
10121 }
10122
10123 return NULL_TREE;
10124
10125 case CEIL_MOD_EXPR:
10126 case FLOOR_MOD_EXPR:
10127 case ROUND_MOD_EXPR:
10128 case TRUNC_MOD_EXPR:
10129 /* X % 1 is always zero, but be sure to preserve any side
10130 effects in X. */
10131 if (integer_onep (arg1))
10132 return omit_one_operand (type, integer_zero_node, arg0);
10133
10134 /* X % 0, return X % 0 unchanged so that we can get the
10135 proper warnings and errors. */
10136 if (integer_zerop (arg1))
10137 return NULL_TREE;
10138
10139 /* 0 % X is always zero, but be sure to preserve any side
10140 effects in X. Place this after checking for X == 0. */
10141 if (integer_zerop (arg0))
10142 return omit_one_operand (type, integer_zero_node, arg1);
10143
10144 /* X % -1 is zero. */
10145 if (!TYPE_UNSIGNED (type)
10146 && TREE_CODE (arg1) == INTEGER_CST
10147 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10148 && TREE_INT_CST_HIGH (arg1) == -1)
10149 return omit_one_operand (type, integer_zero_node, arg0);
10150
10151 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10152 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
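 /* For example, "X % 16" becomes "X & 15". */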
10153 strict_overflow_p = false;
10154 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10155 && (TYPE_UNSIGNED (type)
10156 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10157 {
10158 tree c = arg1;
10159 /* Also optimize A % (C << N) where C is a power of 2,
10160 to A & ((C << N) - 1). */
10161 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10162 c = TREE_OPERAND (arg1, 0);
10163
10164 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10165 {
10166 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10167 arg1, integer_one_node);
10168 if (strict_overflow_p)
10169 fold_overflow_warning (("assuming signed overflow does not "
10170 "occur when simplifying "
10171 "X % (power of two)"),
10172 WARN_STRICT_OVERFLOW_MISC);
10173 return fold_build2 (BIT_AND_EXPR, type,
10174 fold_convert (type, arg0),
10175 fold_convert (type, mask));
10176 }
10177 }
10178
10179 /* X % -C is the same as X % C. */
10180 if (code == TRUNC_MOD_EXPR
10181 && !TYPE_UNSIGNED (type)
10182 && TREE_CODE (arg1) == INTEGER_CST
10183 && !TREE_CONSTANT_OVERFLOW (arg1)
10184 && TREE_INT_CST_HIGH (arg1) < 0
10185 && !TYPE_OVERFLOW_TRAPS (type)
10186 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10187 && !sign_bit_p (arg1, arg1))
10188 return fold_build2 (code, type, fold_convert (type, arg0),
10189 fold_convert (type, negate_expr (arg1)));
10190
10191 /* X % -Y is the same as X % Y. */
10192 if (code == TRUNC_MOD_EXPR
10193 && !TYPE_UNSIGNED (type)
10194 && TREE_CODE (arg1) == NEGATE_EXPR
10195 && !TYPE_OVERFLOW_TRAPS (type))
10196 return fold_build2 (code, type, fold_convert (type, arg0),
10197 fold_convert (type, TREE_OPERAND (arg1, 0)));
10198
10199 if (TREE_CODE (arg1) == INTEGER_CST
10200 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10201 &strict_overflow_p)))
10202 {
10203 if (strict_overflow_p)
10204 fold_overflow_warning (("assuming signed overflow does not occur "
10205 "when simplifying modulos"),
10206 WARN_STRICT_OVERFLOW_MISC);
10207 return fold_convert (type, tem);
10208 }
10209
10210 return NULL_TREE;
10211
10212 case LROTATE_EXPR:
10213 case RROTATE_EXPR:
10214 if (integer_all_onesp (arg0))
10215 return omit_one_operand (type, arg0, arg1);
10216 goto shift;
10217
10218 case RSHIFT_EXPR:
10219 /* Optimize -1 >> x for arithmetic right shifts. */
10220 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10221 return omit_one_operand (type, arg0, arg1);
10222 /* ... fall through ... */
10223
10224 case LSHIFT_EXPR:
10225 shift:
10226 if (integer_zerop (arg1))
10227 return non_lvalue (fold_convert (type, arg0));
10228 if (integer_zerop (arg0))
10229 return omit_one_operand (type, arg0, arg1);
10230
10231 /* Since negative shift count is not well-defined,
10232 don't try to compute it in the compiler. */
10233 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10234 return NULL_TREE;
10235
10236 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
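 /* For example, (X << 3) << 5 becomes X << 8, provided the combined
 count stays within the type's precision. */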
10237 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10238 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10239 && host_integerp (TREE_OPERAND (arg0, 1), false)
10240 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10241 {
10242 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10243 + TREE_INT_CST_LOW (arg1));
10244
10245 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10246 being well defined. */
10247 if (low >= TYPE_PRECISION (type))
10248 {
10249 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10250 low = low % TYPE_PRECISION (type);
10251 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10252 return build_int_cst (type, 0);
10253 else
10254 low = TYPE_PRECISION (type) - 1;
10255 }
10256
10257 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10258 build_int_cst (type, low));
10259 }
10260
10261 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10262 into x & ((unsigned)-1 >> c) for unsigned types. */
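 /* For example, with a 32-bit unsigned type, (x >> 4) << 4
 becomes x & 0xfffffff0. */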
10263 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10264 || (TYPE_UNSIGNED (type)
10265 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10266 && host_integerp (arg1, false)
10267 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10268 && host_integerp (TREE_OPERAND (arg0, 1), false)
10269 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10270 {
10271 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10272 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10273 tree lshift;
10274 tree arg00;
10275
10276 if (low0 == low1)
10277 {
10278 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10279
10280 lshift = build_int_cst (type, -1);
10281 lshift = int_const_binop (code, lshift, arg1, 0);
10282
10283 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10284 }
10285 }
10286
10287 /* Rewrite an LROTATE_EXPR by a constant into an
10288 RROTATE_EXPR by a new constant. */
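 /* For example, on a 32-bit type a left-rotate by 5 becomes a
 right-rotate by 32 - 5 == 27. */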
10289 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10290 {
10291 tree tem = build_int_cst (NULL_TREE,
10292 GET_MODE_BITSIZE (TYPE_MODE (type)));
10293 tem = fold_convert (TREE_TYPE (arg1), tem);
10294 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10295 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10296 }
10297
10298 /* If we have a rotate of a bit operation with the rotate count and
10299 the second operand of the bit operation both constant,
10300 permute the two operations. */
10301 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10302 && (TREE_CODE (arg0) == BIT_AND_EXPR
10303 || TREE_CODE (arg0) == BIT_IOR_EXPR
10304 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10306 return fold_build2 (TREE_CODE (arg0), type,
10307 fold_build2 (code, type,
10308 TREE_OPERAND (arg0, 0), arg1),
10309 fold_build2 (code, type,
10310 TREE_OPERAND (arg0, 1), arg1));
10311
10312 /* Two consecutive rotates adding up to the width of the mode can
10313 be ignored. */
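 /* For example, on a 32-bit type, rotating right by 10 and then
 by 22 rotates by the full width and is a no-op. */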
10314 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10315 && TREE_CODE (arg0) == RROTATE_EXPR
10316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10317 && TREE_INT_CST_HIGH (arg1) == 0
10318 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10319 && ((TREE_INT_CST_LOW (arg1)
10320 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10321 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10322 return TREE_OPERAND (arg0, 0);
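      /* For illustration: on a 32-bit type, (x r>> 10) r>> 22 rotates
	 by a total of 32 bits and therefore folds back to x.  */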
10323
10324 return NULL_TREE;
10325
10326 case MIN_EXPR:
10327 if (operand_equal_p (arg0, arg1, 0))
10328 return omit_one_operand (type, arg0, arg1);
10329 if (INTEGRAL_TYPE_P (type)
10330 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10331 return omit_one_operand (type, arg1, arg0);
10332 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10333 if (tem)
10334 return tem;
10335 goto associate;
10336
10337 case MAX_EXPR:
10338 if (operand_equal_p (arg0, arg1, 0))
10339 return omit_one_operand (type, arg0, arg1);
10340 if (INTEGRAL_TYPE_P (type)
10341 && TYPE_MAX_VALUE (type)
10342 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10343 return omit_one_operand (type, arg1, arg0);
10344 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10345 if (tem)
10346 return tem;
10347 goto associate;
10348
10349 case TRUTH_ANDIF_EXPR:
10350 /* Note that the operands of this must be ints
10351 and their values must be 0 or 1.
10352 ("true" is a fixed value perhaps depending on the language.) */
10353 /* If first arg is constant zero, return it. */
10354 if (integer_zerop (arg0))
10355 return fold_convert (type, arg0);
10356 case TRUTH_AND_EXPR:
10357 /* If either arg is constant true, drop it. */
10358 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10359 return non_lvalue (fold_convert (type, arg1));
10360 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10361 /* Preserve sequence points. */
10362 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10363 return non_lvalue (fold_convert (type, arg0));
10364 /* If second arg is constant zero, result is zero, but first arg
10365 must be evaluated. */
10366 if (integer_zerop (arg1))
10367 return omit_one_operand (type, arg1, arg0);
10368 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10369 case will be handled here. */
10370 if (integer_zerop (arg0))
10371 return omit_one_operand (type, arg0, arg1);
10372
10373 /* !X && X is always false. */
10374 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10375 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10376 return omit_one_operand (type, integer_zero_node, arg1);
10377 /* X && !X is always false. */
10378 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10379 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10380 return omit_one_operand (type, integer_zero_node, arg0);
10381
10382 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10383 means A >= Y && A != MAX, but in this case we know that
10384 A < X <= MAX. */
10385
10386 if (!TREE_SIDE_EFFECTS (arg0)
10387 && !TREE_SIDE_EFFECTS (arg1))
10388 {
10389 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10390 if (tem && !operand_equal_p (tem, arg0, 0))
10391 return fold_build2 (code, type, tem, arg1);
10392
10393 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10394 if (tem && !operand_equal_p (tem, arg1, 0))
10395 return fold_build2 (code, type, arg0, tem);
10396 }
10397
10398 truth_andor:
10399 /* We only do these simplifications if we are optimizing. */
10400 if (!optimize)
10401 return NULL_TREE;
10402
10403 /* Check for things like (A || B) && (A || C). We can convert this
10404 to A || (B && C). Note that either operator can be any of the four
10405 truth and/or operations and the transformation will still be
10406 valid. Also note that we only care about order for the
10407 ANDIF and ORIF operators. If B contains side effects, this
10408 might change the truth-value of A. */
10409 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10410 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10411 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10412 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10413 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10414 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10415 {
10416 tree a00 = TREE_OPERAND (arg0, 0);
10417 tree a01 = TREE_OPERAND (arg0, 1);
10418 tree a10 = TREE_OPERAND (arg1, 0);
10419 tree a11 = TREE_OPERAND (arg1, 1);
10420 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10421 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10422 && (code == TRUTH_AND_EXPR
10423 || code == TRUTH_OR_EXPR));
10424
10425 if (operand_equal_p (a00, a10, 0))
10426 return fold_build2 (TREE_CODE (arg0), type, a00,
10427 fold_build2 (code, type, a01, a11));
10428 else if (commutative && operand_equal_p (a00, a11, 0))
10429 return fold_build2 (TREE_CODE (arg0), type, a00,
10430 fold_build2 (code, type, a01, a10));
10431 else if (commutative && operand_equal_p (a01, a10, 0))
10432 return fold_build2 (TREE_CODE (arg0), type, a01,
10433 fold_build2 (code, type, a00, a11));
10434
 10435	  /* This case is tricky because we must either have commutative
10436 operators or else A10 must not have side-effects. */
10437
10438 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10439 && operand_equal_p (a01, a11, 0))
10440 return fold_build2 (TREE_CODE (arg0), type,
10441 fold_build2 (code, type, a00, a10),
10442 a01);
10443 }
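	  /* For illustration: (a || b) && (a || c) folds to
	     a || (b && c); the same rewrite applies to any mix of the
	     four truth AND/OR codes whenever the shared operand lines
	     up as required above.  */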
10444
10445 /* See if we can build a range comparison. */
10446 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10447 return tem;
10448
10449 /* Check for the possibility of merging component references. If our
10450 lhs is another similar operation, try to merge its rhs with our
10451 rhs. Then try to merge our lhs and rhs. */
10452 if (TREE_CODE (arg0) == code
10453 && 0 != (tem = fold_truthop (code, type,
10454 TREE_OPERAND (arg0, 1), arg1)))
10455 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10456
10457 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10458 return tem;
10459
10460 return NULL_TREE;
10461
10462 case TRUTH_ORIF_EXPR:
10463 /* Note that the operands of this must be ints
 10464	 and their values must be 0 or 1.
10465 ("true" is a fixed value perhaps depending on the language.) */
10466 /* If first arg is constant true, return it. */
10467 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10468 return fold_convert (type, arg0);
10469 case TRUTH_OR_EXPR:
10470 /* If either arg is constant zero, drop it. */
10471 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10472 return non_lvalue (fold_convert (type, arg1));
10473 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10474 /* Preserve sequence points. */
10475 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10476 return non_lvalue (fold_convert (type, arg0));
10477 /* If second arg is constant true, result is true, but we must
10478 evaluate first arg. */
10479 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10480 return omit_one_operand (type, arg1, arg0);
10481 /* Likewise for first arg, but note this only occurs here for
10482 TRUTH_OR_EXPR. */
10483 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10484 return omit_one_operand (type, arg0, arg1);
10485
10486 /* !X || X is always true. */
10487 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10489 return omit_one_operand (type, integer_one_node, arg1);
10490 /* X || !X is always true. */
10491 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10493 return omit_one_operand (type, integer_one_node, arg0);
10494
10495 goto truth_andor;
10496
10497 case TRUTH_XOR_EXPR:
10498 /* If the second arg is constant zero, drop it. */
10499 if (integer_zerop (arg1))
10500 return non_lvalue (fold_convert (type, arg0));
10501 /* If the second arg is constant true, this is a logical inversion. */
10502 if (integer_onep (arg1))
10503 {
10504 /* Only call invert_truthvalue if operand is a truth value. */
10505 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10506 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10507 else
10508 tem = invert_truthvalue (arg0);
10509 return non_lvalue (fold_convert (type, tem));
10510 }
10511 /* Identical arguments cancel to zero. */
10512 if (operand_equal_p (arg0, arg1, 0))
10513 return omit_one_operand (type, integer_zero_node, arg0);
10514
10515 /* !X ^ X is always true. */
10516 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10518 return omit_one_operand (type, integer_one_node, arg1);
10519
10520 /* X ^ !X is always true. */
10521 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10522 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10523 return omit_one_operand (type, integer_one_node, arg0);
10524
10525 return NULL_TREE;
10526
10527 case EQ_EXPR:
10528 case NE_EXPR:
10529 tem = fold_comparison (code, type, op0, op1);
10530 if (tem != NULL_TREE)
10531 return tem;
10532
10533 /* bool_var != 0 becomes bool_var. */
10534 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10535 && code == NE_EXPR)
10536 return non_lvalue (fold_convert (type, arg0));
10537
10538 /* bool_var == 1 becomes bool_var. */
10539 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10540 && code == EQ_EXPR)
10541 return non_lvalue (fold_convert (type, arg0));
10542
10543 /* bool_var != 1 becomes !bool_var. */
10544 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10545 && code == NE_EXPR)
10546 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10547
10548 /* bool_var == 0 becomes !bool_var. */
10549 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10550 && code == EQ_EXPR)
10551 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10552
10553 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10554 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10555 && TREE_CODE (arg1) == INTEGER_CST)
10556 {
10557 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10558 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10559 fold_build1 (BIT_NOT_EXPR, cmp_type,
10560 fold_convert (cmp_type, arg1)));
10561 }
10562
10563 /* If this is an equality comparison of the address of a non-weak
10564 object against zero, then we know the result. */
10565 if (TREE_CODE (arg0) == ADDR_EXPR
10566 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10567 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10568 && integer_zerop (arg1))
10569 return constant_boolean_node (code != EQ_EXPR, type);
10570
10571 /* If this is an equality comparison of the address of two non-weak,
10572 unaliased symbols neither of which are extern (since we do not
10573 have access to attributes for externs), then we know the result. */
10574 if (TREE_CODE (arg0) == ADDR_EXPR
10575 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10576 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10577 && ! lookup_attribute ("alias",
10578 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10579 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10580 && TREE_CODE (arg1) == ADDR_EXPR
10581 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10582 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10583 && ! lookup_attribute ("alias",
10584 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10585 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10586 {
10587 /* We know that we're looking at the address of two
10588 non-weak, unaliased, static _DECL nodes.
10589
10590 It is both wasteful and incorrect to call operand_equal_p
10591 to compare the two ADDR_EXPR nodes. It is wasteful in that
10592 all we need to do is test pointer equality for the arguments
10593 to the two ADDR_EXPR nodes. It is incorrect to use
10594 operand_equal_p as that function is NOT equivalent to a
10595 C equality test. It can in fact return false for two
10596 objects which would test as equal using the C equality
10597 operator. */
10598 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10599 return constant_boolean_node (equal
10600 ? code == EQ_EXPR : code != EQ_EXPR,
10601 type);
10602 }
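	  /* For illustration (hypothetical decls): given
	     "static int x, y;", the expression &x == &y folds to 0 and
	     &x != &y folds to 1, by pointer identity of the DECL nodes
	     themselves.  */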
10603
10604 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10605 a MINUS_EXPR of a constant, we can convert it into a comparison with
10606 a revised constant as long as no overflow occurs. */
10607 if (TREE_CODE (arg1) == INTEGER_CST
10608 && (TREE_CODE (arg0) == PLUS_EXPR
10609 || TREE_CODE (arg0) == MINUS_EXPR)
10610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10611 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10612 ? MINUS_EXPR : PLUS_EXPR,
10613 fold_convert (TREE_TYPE (arg0), arg1),
10614 TREE_OPERAND (arg0, 1), 0))
10615 && ! TREE_CONSTANT_OVERFLOW (tem))
10616 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10617
10618 /* Similarly for a NEGATE_EXPR. */
10619 if (TREE_CODE (arg0) == NEGATE_EXPR
10620 && TREE_CODE (arg1) == INTEGER_CST
10621 && 0 != (tem = negate_expr (arg1))
10622 && TREE_CODE (tem) == INTEGER_CST
10623 && ! TREE_CONSTANT_OVERFLOW (tem))
10624 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10625
10626 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10627 for !=. Don't do this for ordered comparisons due to overflow. */
10628 if (TREE_CODE (arg0) == MINUS_EXPR
10629 && integer_zerop (arg1))
10630 return fold_build2 (code, type,
10631 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10632
10633 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10634 if (TREE_CODE (arg0) == ABS_EXPR
10635 && (integer_zerop (arg1) || real_zerop (arg1)))
10636 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10637
10638 /* If this is an EQ or NE comparison with zero and ARG0 is
10639 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10640 two operations, but the latter can be done in one less insn
10641 on machines that have only two-operand insns or on which a
10642 constant cannot be the first operand. */
10643 if (TREE_CODE (arg0) == BIT_AND_EXPR
10644 && integer_zerop (arg1))
10645 {
10646 tree arg00 = TREE_OPERAND (arg0, 0);
10647 tree arg01 = TREE_OPERAND (arg0, 1);
10648 if (TREE_CODE (arg00) == LSHIFT_EXPR
10649 && integer_onep (TREE_OPERAND (arg00, 0)))
10650 return
10651 fold_build2 (code, type,
10652 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10653 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10654 arg01, TREE_OPERAND (arg00, 1)),
10655 fold_convert (TREE_TYPE (arg0),
10656 integer_one_node)),
10657 arg1);
10658 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10659 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10660 return
10661 fold_build2 (code, type,
10662 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10663 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10664 arg00, TREE_OPERAND (arg01, 1)),
10665 fold_convert (TREE_TYPE (arg0),
10666 integer_one_node)),
10667 arg1);
10668 }
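	  /* For illustration: ((1 << n) & b) == 0 is rewritten as
	     ((b >> n) & 1) == 0, replacing a variable shift of a
	     constant with a shift of the other operand.  */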
10669
10670 /* If this is an NE or EQ comparison of zero against the result of a
10671 signed MOD operation whose second operand is a power of 2, make
10672 the MOD operation unsigned since it is simpler and equivalent. */
10673 if (integer_zerop (arg1)
10674 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10675 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10676 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10677 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10678 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10679 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10680 {
10681 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10682 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10683 fold_convert (newtype,
10684 TREE_OPERAND (arg0, 0)),
10685 fold_convert (newtype,
10686 TREE_OPERAND (arg0, 1)));
10687
10688 return fold_build2 (code, type, newmod,
10689 fold_convert (newtype, arg1));
10690 }
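	  /* For illustration: with signed x, (x % 4) != 0 is rewritten
	     as ((unsigned) x % 4) != 0; the two agree for a
	     power-of-two modulus compared against zero, and the
	     unsigned form is simpler.  */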
10691
10692 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10693 C1 is a valid shift constant, and C2 is a power of two, i.e.
10694 a single bit. */
10695 if (TREE_CODE (arg0) == BIT_AND_EXPR
10696 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10698 == INTEGER_CST
10699 && integer_pow2p (TREE_OPERAND (arg0, 1))
10700 && integer_zerop (arg1))
10701 {
10702 tree itype = TREE_TYPE (arg0);
10703 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10704 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10705
10706 /* Check for a valid shift count. */
10707 if (TREE_INT_CST_HIGH (arg001) == 0
10708 && TREE_INT_CST_LOW (arg001) < prec)
10709 {
10710 tree arg01 = TREE_OPERAND (arg0, 1);
10711 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10712 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10713 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10714 can be rewritten as (X & (C2 << C1)) != 0. */
10715 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10716 {
10717 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10718 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10719 return fold_build2 (code, type, tem, arg1);
10720 }
10721 /* Otherwise, for signed (arithmetic) shifts,
10722 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10723 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10724 else if (!TYPE_UNSIGNED (itype))
10725 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10726 arg000, build_int_cst (itype, 0));
 10727	      /* Otherwise, for unsigned (logical) shifts,
10728 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10729 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10730 else
10731 return omit_one_operand (type,
10732 code == EQ_EXPR ? integer_one_node
10733 : integer_zero_node,
10734 arg000);
10735 }
10736 }
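	  /* For illustration (32-bit x): ((x >> 3) & 4) != 0 becomes
	     (x & 32) != 0, since 4 << 3 stays within the precision.
	     For signed x, ((x >> 31) & 2) != 0 instead becomes x < 0,
	     because the shifted-in bits are copies of the sign bit.  */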
10737
10738 /* If this is an NE comparison of zero with an AND of one, remove the
10739 comparison since the AND will give the correct value. */
10740 if (code == NE_EXPR
10741 && integer_zerop (arg1)
10742 && TREE_CODE (arg0) == BIT_AND_EXPR
10743 && integer_onep (TREE_OPERAND (arg0, 1)))
10744 return fold_convert (type, arg0);
10745
10746 /* If we have (A & C) == C where C is a power of 2, convert this into
10747 (A & C) != 0. Similarly for NE_EXPR. */
10748 if (TREE_CODE (arg0) == BIT_AND_EXPR
10749 && integer_pow2p (TREE_OPERAND (arg0, 1))
10750 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10751 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10752 arg0, fold_convert (TREE_TYPE (arg0),
10753 integer_zero_node));
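      /* For illustration: (a & 8) == 8 folds to (a & 8) != 0, and
	 (a & 8) != 8 folds to (a & 8) == 0; this is valid because 8 is
	 a single bit.  */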
10754
10755 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10756 bit, then fold the expression into A < 0 or A >= 0. */
10757 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10758 if (tem)
10759 return tem;
10760
10761 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10762 Similarly for NE_EXPR. */
10763 if (TREE_CODE (arg0) == BIT_AND_EXPR
10764 && TREE_CODE (arg1) == INTEGER_CST
10765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10766 {
10767 tree notc = fold_build1 (BIT_NOT_EXPR,
10768 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10769 TREE_OPERAND (arg0, 1));
10770 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10771 arg1, notc);
10772 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10773 if (integer_nonzerop (dandnotc))
10774 return omit_one_operand (type, rslt, arg0);
10775 }
10776
10777 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10778 Similarly for NE_EXPR. */
10779 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10780 && TREE_CODE (arg1) == INTEGER_CST
10781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10782 {
10783 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10784 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10785 TREE_OPERAND (arg0, 1), notd);
10786 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10787 if (integer_nonzerop (candnotd))
10788 return omit_one_operand (type, rslt, arg0);
10789 }
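      /* For illustration: (a & 3) == 4 folds to 0 because bit 2 can
	 never be set in a & 3; likewise (a | 4) == 3 folds to 0
	 because bit 2 is always set in a | 4.  */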
10790
10791 /* If this is a comparison of a field, we may be able to simplify it. */
10792 if (((TREE_CODE (arg0) == COMPONENT_REF
10793 && lang_hooks.can_use_bit_fields_p ())
10794 || TREE_CODE (arg0) == BIT_FIELD_REF)
10795 /* Handle the constant case even without -O
10796 to make sure the warnings are given. */
10797 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10798 {
10799 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10800 if (t1)
10801 return t1;
10802 }
10803
10804 /* Optimize comparisons of strlen vs zero to a compare of the
10805 first character of the string vs zero. To wit,
10806 strlen(ptr) == 0 => *ptr == 0
10807 strlen(ptr) != 0 => *ptr != 0
10808 Other cases should reduce to one of these two (or a constant)
10809 due to the return value of strlen being unsigned. */
10810 if (TREE_CODE (arg0) == CALL_EXPR
10811 && integer_zerop (arg1))
10812 {
10813 tree fndecl = get_callee_fndecl (arg0);
10814 tree arglist;
10815
10816 if (fndecl
10817 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10818 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10819 && (arglist = TREE_OPERAND (arg0, 1))
10820 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10821 && ! TREE_CHAIN (arglist))
10822 {
10823 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10824 return fold_build2 (code, type, iref,
10825 build_int_cst (TREE_TYPE (iref), 0));
10826 }
10827 }
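      /* For illustration: strlen (p) == 0 folds to *p == 0 and
	 strlen (p) != 0 folds to *p != 0, avoiding the library call
	 when only emptiness is being tested.  */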
10828
10829 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10830 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10831 if (TREE_CODE (arg0) == RSHIFT_EXPR
10832 && integer_zerop (arg1)
10833 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10834 {
10835 tree arg00 = TREE_OPERAND (arg0, 0);
10836 tree arg01 = TREE_OPERAND (arg0, 1);
10837 tree itype = TREE_TYPE (arg00);
10838 if (TREE_INT_CST_HIGH (arg01) == 0
10839 && TREE_INT_CST_LOW (arg01)
10840 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10841 {
10842 if (TYPE_UNSIGNED (itype))
10843 {
10844 itype = lang_hooks.types.signed_type (itype);
10845 arg00 = fold_convert (itype, arg00);
10846 }
10847 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10848 type, arg00, build_int_cst (itype, 0));
10849 }
10850 }
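      /* For illustration (32-bit int x): (x >> 31) != 0 folds to
	 x < 0; for unsigned x the operand is first converted to the
	 corresponding signed type.  */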
10851
10852 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10853 if (integer_zerop (arg1)
10854 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10855 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10856 TREE_OPERAND (arg0, 1));
10857
10858 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10859 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10860 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10861 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10862 build_int_cst (TREE_TYPE (arg1), 0));
10863 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10864 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10865 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10866 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10867 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10868 build_int_cst (TREE_TYPE (arg1), 0));
10869
10870 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10872 && TREE_CODE (arg1) == INTEGER_CST
10873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10874 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10875 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10876 TREE_OPERAND (arg0, 1), arg1));
10877
10878 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10879 (X & C) == 0 when C is a single bit. */
10880 if (TREE_CODE (arg0) == BIT_AND_EXPR
10881 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10882 && integer_zerop (arg1)
10883 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10884 {
10885 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10886 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10887 TREE_OPERAND (arg0, 1));
10888 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10889 type, tem, arg1);
10890 }
10891
10892 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10893 constant C is a power of two, i.e. a single bit. */
10894 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10895 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10896 && integer_zerop (arg1)
10897 && integer_pow2p (TREE_OPERAND (arg0, 1))
10898 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10899 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10900 {
10901 tree arg00 = TREE_OPERAND (arg0, 0);
10902 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10903 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10904 }
10905
10906 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 10907	 when C is a power of two, i.e. a single bit.  */
10908 if (TREE_CODE (arg0) == BIT_AND_EXPR
10909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10910 && integer_zerop (arg1)
10911 && integer_pow2p (TREE_OPERAND (arg0, 1))
10912 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10913 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10914 {
10915 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10916 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10917 arg000, TREE_OPERAND (arg0, 1));
10918 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10919 tem, build_int_cst (TREE_TYPE (tem), 0));
10920 }
10921
10922 if (integer_zerop (arg1)
10923 && tree_expr_nonzero_p (arg0))
10924 {
 10925	  tree res = constant_boolean_node (code == NE_EXPR, type);
10926 return omit_one_operand (type, res, arg0);
10927 }
10928 return NULL_TREE;
10929
10930 case LT_EXPR:
10931 case GT_EXPR:
10932 case LE_EXPR:
10933 case GE_EXPR:
10934 tem = fold_comparison (code, type, op0, op1);
10935 if (tem != NULL_TREE)
10936 return tem;
10937
10938 /* Transform comparisons of the form X +- C CMP X. */
10939 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10940 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10941 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10942 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10943 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10944 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10945 {
10946 tree arg01 = TREE_OPERAND (arg0, 1);
10947 enum tree_code code0 = TREE_CODE (arg0);
10948 int is_positive;
10949
10950 if (TREE_CODE (arg01) == REAL_CST)
10951 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10952 else
10953 is_positive = tree_int_cst_sgn (arg01);
10954
10955 /* (X - c) > X becomes false. */
10956 if (code == GT_EXPR
10957 && ((code0 == MINUS_EXPR && is_positive >= 0)
10958 || (code0 == PLUS_EXPR && is_positive <= 0)))
10959 {
10960 if (TREE_CODE (arg01) == INTEGER_CST
10961 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10962 fold_overflow_warning (("assuming signed overflow does not "
10963 "occur when assuming that (X - c) > X "
10964 "is always false"),
10965 WARN_STRICT_OVERFLOW_ALL);
10966 return constant_boolean_node (0, type);
10967 }
10968
10969 /* Likewise (X + c) < X becomes false. */
10970 if (code == LT_EXPR
10971 && ((code0 == PLUS_EXPR && is_positive >= 0)
10972 || (code0 == MINUS_EXPR && is_positive <= 0)))
10973 {
10974 if (TREE_CODE (arg01) == INTEGER_CST
10975 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10976 fold_overflow_warning (("assuming signed overflow does not "
10977 "occur when assuming that "
10978 "(X + c) < X is always false"),
10979 WARN_STRICT_OVERFLOW_ALL);
10980 return constant_boolean_node (0, type);
10981 }
10982
10983 /* Convert (X - c) <= X to true. */
10984 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10985 && code == LE_EXPR
10986 && ((code0 == MINUS_EXPR && is_positive >= 0)
10987 || (code0 == PLUS_EXPR && is_positive <= 0)))
10988 {
10989 if (TREE_CODE (arg01) == INTEGER_CST
10990 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10991 fold_overflow_warning (("assuming signed overflow does not "
10992 "occur when assuming that "
10993 "(X - c) <= X is always true"),
10994 WARN_STRICT_OVERFLOW_ALL);
10995 return constant_boolean_node (1, type);
10996 }
10997
10998 /* Convert (X + c) >= X to true. */
10999 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11000 && code == GE_EXPR
11001 && ((code0 == PLUS_EXPR && is_positive >= 0)
11002 || (code0 == MINUS_EXPR && is_positive <= 0)))
11003 {
11004 if (TREE_CODE (arg01) == INTEGER_CST
11005 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11006 fold_overflow_warning (("assuming signed overflow does not "
11007 "occur when assuming that "
11008 "(X + c) >= X is always true"),
11009 WARN_STRICT_OVERFLOW_ALL);
11010 return constant_boolean_node (1, type);
11011 }
11012
11013 if (TREE_CODE (arg01) == INTEGER_CST)
11014 {
11015 /* Convert X + c > X and X - c < X to true for integers. */
11016 if (code == GT_EXPR
11017 && ((code0 == PLUS_EXPR && is_positive > 0)
11018 || (code0 == MINUS_EXPR && is_positive < 0)))
11019 {
11020 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11021 fold_overflow_warning (("assuming signed overflow does "
11022 "not occur when assuming that "
11023 "(X + c) > X is always true"),
11024 WARN_STRICT_OVERFLOW_ALL);
11025 return constant_boolean_node (1, type);
11026 }
11027
11028 if (code == LT_EXPR
11029 && ((code0 == MINUS_EXPR && is_positive > 0)
11030 || (code0 == PLUS_EXPR && is_positive < 0)))
11031 {
11032 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11033 fold_overflow_warning (("assuming signed overflow does "
11034 "not occur when assuming that "
11035 "(X - c) < X is always true"),
11036 WARN_STRICT_OVERFLOW_ALL);
11037 return constant_boolean_node (1, type);
11038 }
11039
11040 /* Convert X + c <= X and X - c >= X to false for integers. */
11041 if (code == LE_EXPR
11042 && ((code0 == PLUS_EXPR && is_positive > 0)
11043 || (code0 == MINUS_EXPR && is_positive < 0)))
11044 {
11045 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11046 fold_overflow_warning (("assuming signed overflow does "
11047 "not occur when assuming that "
11048 "(X + c) <= X is always false"),
11049 WARN_STRICT_OVERFLOW_ALL);
11050 return constant_boolean_node (0, type);
11051 }
11052
11053 if (code == GE_EXPR
11054 && ((code0 == MINUS_EXPR && is_positive > 0)
11055 || (code0 == PLUS_EXPR && is_positive < 0)))
11056 {
11057 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11058 fold_overflow_warning (("assuming signed overflow does "
11059 "not occur when assuming that "
11060 "(X - c) >= X is always true"),
11061 WARN_STRICT_OVERFLOW_ALL);
11062 return constant_boolean_node (0, type);
11063 }
11064 }
11065 }
11066
11067 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11068 This transformation affects the cases which are handled in later
11069 optimizations involving comparisons with non-negative constants. */
11070 if (TREE_CODE (arg1) == INTEGER_CST
11071 && TREE_CODE (arg0) != INTEGER_CST
11072 && tree_int_cst_sgn (arg1) > 0)
11073 {
11074 if (code == GE_EXPR)
11075 {
11076 arg1 = const_binop (MINUS_EXPR, arg1,
11077 build_int_cst (TREE_TYPE (arg1), 1), 0);
11078 return fold_build2 (GT_EXPR, type, arg0,
11079 fold_convert (TREE_TYPE (arg0), arg1));
11080 }
11081 if (code == LT_EXPR)
11082 {
11083 arg1 = const_binop (MINUS_EXPR, arg1,
11084 build_int_cst (TREE_TYPE (arg1), 1), 0);
11085 return fold_build2 (LE_EXPR, type, arg0,
11086 fold_convert (TREE_TYPE (arg0), arg1));
11087 }
11088 }
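      /* For illustration: for a positive constant, x >= 5 is
	 rewritten as x > 4 and x < 5 as x <= 4, canonicalizing onto
	 GT/LE so later transformations see fewer comparison shapes.  */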
11089
11090 /* Comparisons with the highest or lowest possible integer of
11091 the specified size will have known values. */
11092 {
11093 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11094
11095 if (TREE_CODE (arg1) == INTEGER_CST
11096 && ! TREE_CONSTANT_OVERFLOW (arg1)
11097 && width <= 2 * HOST_BITS_PER_WIDE_INT
11098 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11099 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11100 {
11101 HOST_WIDE_INT signed_max_hi;
11102 unsigned HOST_WIDE_INT signed_max_lo;
11103 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11104
11105 if (width <= HOST_BITS_PER_WIDE_INT)
11106 {
11107 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11108 - 1;
11109 signed_max_hi = 0;
11110 max_hi = 0;
11111
11112 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11113 {
11114 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11115 min_lo = 0;
11116 min_hi = 0;
11117 }
11118 else
11119 {
11120 max_lo = signed_max_lo;
11121 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11122 min_hi = -1;
11123 }
11124 }
11125 else
11126 {
11127 width -= HOST_BITS_PER_WIDE_INT;
11128 signed_max_lo = -1;
11129 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11130 - 1;
11131 max_lo = -1;
11132 min_lo = 0;
11133
11134 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11135 {
11136 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11137 min_hi = 0;
11138 }
11139 else
11140 {
11141 max_hi = signed_max_hi;
11142 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11143 }
11144 }
11145
11146 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11147 && TREE_INT_CST_LOW (arg1) == max_lo)
11148 switch (code)
11149 {
11150 case GT_EXPR:
11151 return omit_one_operand (type, integer_zero_node, arg0);
11152
11153 case GE_EXPR:
11154 return fold_build2 (EQ_EXPR, type, op0, op1);
11155
11156 case LE_EXPR:
11157 return omit_one_operand (type, integer_one_node, arg0);
11158
11159 case LT_EXPR:
11160 return fold_build2 (NE_EXPR, type, op0, op1);
11161
11162 /* The GE_EXPR and LT_EXPR cases above are not normally
11163 reached because of previous transformations. */
11164
11165 default:
11166 break;
11167 }
11168 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11169 == max_hi
11170 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11171 switch (code)
11172 {
11173 case GT_EXPR:
11174 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11175 return fold_build2 (EQ_EXPR, type,
11176 fold_convert (TREE_TYPE (arg1), arg0),
11177 arg1);
11178 case LE_EXPR:
11179 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11180 return fold_build2 (NE_EXPR, type,
11181 fold_convert (TREE_TYPE (arg1), arg0),
11182 arg1);
11183 default:
11184 break;
11185 }
11186 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11187 == min_hi
11188 && TREE_INT_CST_LOW (arg1) == min_lo)
11189 switch (code)
11190 {
11191 case LT_EXPR:
11192 return omit_one_operand (type, integer_zero_node, arg0);
11193
11194 case LE_EXPR:
11195 return fold_build2 (EQ_EXPR, type, op0, op1);
11196
11197 case GE_EXPR:
11198 return omit_one_operand (type, integer_one_node, arg0);
11199
11200 case GT_EXPR:
11201 return fold_build2 (NE_EXPR, type, op0, op1);
11202
11203 default:
11204 break;
11205 }
11206 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11207 == min_hi
11208 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11209 switch (code)
11210 {
11211 case GE_EXPR:
11212 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11213 return fold_build2 (NE_EXPR, type,
11214 fold_convert (TREE_TYPE (arg1), arg0),
11215 arg1);
11216 case LT_EXPR:
11217 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11218 return fold_build2 (EQ_EXPR, type,
11219 fold_convert (TREE_TYPE (arg1), arg0),
11220 arg1);
11221 default:
11222 break;
11223 }
11224
11225 else if (!in_gimple_form
11226 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11227 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11228 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11229 /* signed_type does not work on pointer types. */
11230 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11231 {
11232 /* The following case also applies to X < signed_max+1
 11233	 and X >= signed_max+1 because of previous transformations.  */
11234 if (code == LE_EXPR || code == GT_EXPR)
11235 {
11236 tree st;
11237 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11238 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11239 type, fold_convert (st, arg0),
11240 build_int_cst (st, 0));
11241 }
11242 }
11243 }
11244 }
11245
11246 /* If we are comparing an ABS_EXPR with a constant, we can
11247 convert all the cases into explicit comparisons, but they may
11248 well not be faster than doing the ABS and one comparison.
11249 But ABS (X) <= C is a range comparison, which becomes a subtraction
11250 and a comparison, and is probably faster. */
11251 if (code == LE_EXPR
11252 && TREE_CODE (arg1) == INTEGER_CST
11253 && TREE_CODE (arg0) == ABS_EXPR
11254 && ! TREE_SIDE_EFFECTS (arg0)
11255 && (0 != (tem = negate_expr (arg1)))
11256 && TREE_CODE (tem) == INTEGER_CST
11257 && ! TREE_CONSTANT_OVERFLOW (tem))
11258 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11259 build2 (GE_EXPR, type,
11260 TREE_OPERAND (arg0, 0), tem),
11261 build2 (LE_EXPR, type,
11262 TREE_OPERAND (arg0, 0), arg1));
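      /* For illustration: abs (x) <= 7 folds to x >= -7 && x <= 7,
	 provided negating the constant does not overflow.  */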
11263
11264 /* Convert ABS_EXPR<x> >= 0 to true. */
11265 strict_overflow_p = false;
11266 if (code == GE_EXPR
11267 && (integer_zerop (arg1)
11268 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11269 && real_zerop (arg1)))
11270 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11271 {
11272 if (strict_overflow_p)
11273 fold_overflow_warning (("assuming signed overflow does not occur "
11274 "when simplifying comparison of "
11275 "absolute value and zero"),
11276 WARN_STRICT_OVERFLOW_CONDITIONAL);
11277 return omit_one_operand (type, integer_one_node, arg0);
11278 }
11279
11280 /* Convert ABS_EXPR<x> < 0 to false. */
11281 strict_overflow_p = false;
11282 if (code == LT_EXPR
11283 && (integer_zerop (arg1) || real_zerop (arg1))
11284 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11285 {
11286 if (strict_overflow_p)
11287 fold_overflow_warning (("assuming signed overflow does not occur "
11288 "when simplifying comparison of "
11289 "absolute value and zero"),
11290 WARN_STRICT_OVERFLOW_CONDITIONAL);
11291 return omit_one_operand (type, integer_zero_node, arg0);
11292 }
11293
11294 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11295 and similarly for >= into !=. */
11296 if ((code == LT_EXPR || code == GE_EXPR)
11297 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11298 && TREE_CODE (arg1) == LSHIFT_EXPR
11299 && integer_onep (TREE_OPERAND (arg1, 0)))
11300 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11301 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11302 TREE_OPERAND (arg1, 1)),
11303 build_int_cst (TREE_TYPE (arg0), 0));
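      /* For illustration (unsigned x): x < (1 << y) folds to
	 (x >> y) == 0 and x >= (1 << y) to (x >> y) != 0.  */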
11304
11305 if ((code == LT_EXPR || code == GE_EXPR)
11306 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11307 && (TREE_CODE (arg1) == NOP_EXPR
11308 || TREE_CODE (arg1) == CONVERT_EXPR)
11309 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11310 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11311 return
11312 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11313 fold_convert (TREE_TYPE (arg0),
11314 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11315 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11316 1))),
11317 build_int_cst (TREE_TYPE (arg0), 0));
11318
11319 return NULL_TREE;
11320
11321 case UNORDERED_EXPR:
11322 case ORDERED_EXPR:
11323 case UNLT_EXPR:
11324 case UNLE_EXPR:
11325 case UNGT_EXPR:
11326 case UNGE_EXPR:
11327 case UNEQ_EXPR:
11328 case LTGT_EXPR:
11329 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11330 {
11331 t1 = fold_relational_const (code, type, arg0, arg1);
11332 if (t1 != NULL_TREE)
11333 return t1;
11334 }
11335
11336 /* If the first operand is NaN, the result is constant. */
11337 if (TREE_CODE (arg0) == REAL_CST
11338 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11339 && (code != LTGT_EXPR || ! flag_trapping_math))
11340 {
11341 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11342 ? integer_zero_node
11343 : integer_one_node;
11344 return omit_one_operand (type, t1, arg1);
11345 }
11346
11347 /* If the second operand is NaN, the result is constant. */
11348 if (TREE_CODE (arg1) == REAL_CST
11349 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11350 && (code != LTGT_EXPR || ! flag_trapping_math))
11351 {
11352 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11353 ? integer_zero_node
11354 : integer_one_node;
11355 return omit_one_operand (type, t1, arg0);
11356 }
11357
11358 /* Simplify unordered comparison of something with itself. */
11359 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11360 && operand_equal_p (arg0, arg1, 0))
11361 return constant_boolean_node (1, type);
11362
11363 if (code == LTGT_EXPR
11364 && !flag_trapping_math
11365 && operand_equal_p (arg0, arg1, 0))
11366 return constant_boolean_node (0, type);
11367
11368 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11369 {
11370 tree targ0 = strip_float_extensions (arg0);
11371 tree targ1 = strip_float_extensions (arg1);
11372 tree newtype = TREE_TYPE (targ0);
11373
11374 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11375 newtype = TREE_TYPE (targ1);
11376
11377 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11378 return fold_build2 (code, type, fold_convert (newtype, targ0),
11379 fold_convert (newtype, targ1));
11380 }
11381
11382 return NULL_TREE;
11383
11384 case COMPOUND_EXPR:
11385 /* When pedantic, a compound expression can be neither an lvalue
11386 nor an integer constant expression. */
11387 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11388 return NULL_TREE;
 11389	 /* Don't let (0, 0) be a null pointer constant.  */
11390 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11391 : fold_convert (type, arg1);
11392 return pedantic_non_lvalue (tem);
11393
11394 case COMPLEX_EXPR:
11395 if ((TREE_CODE (arg0) == REAL_CST
11396 && TREE_CODE (arg1) == REAL_CST)
11397 || (TREE_CODE (arg0) == INTEGER_CST
11398 && TREE_CODE (arg1) == INTEGER_CST))
11399 return build_complex (type, arg0, arg1);
11400 return NULL_TREE;
11401
11402 case ASSERT_EXPR:
11403 /* An ASSERT_EXPR should never be passed to fold_binary. */
11404 gcc_unreachable ();
11405
11406 default:
11407 return NULL_TREE;
11408 } /* switch (code) */
11409}
11410
 11411	/* Callback for walk_tree, looking for LABEL_EXPR.
 11412	   Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
 11413	   Does not descend into the sub-tree of a GOTO_EXPR.  */
11414
11415static tree
11416contains_label_1 (tree *tp,
11417 int *walk_subtrees,
11418 void *data ATTRIBUTE_UNUSED)
11419{
11420 switch (TREE_CODE (*tp))
11421 {
11422 case LABEL_EXPR:
11423 return *tp;
11424 case GOTO_EXPR:
11425 *walk_subtrees = 0;
11426 /* no break */
11427 default:
11428 return NULL_TREE;
11429 }
11430}
11431
 11432	/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
 11433	   accessible from outside the sub-tree.  Returns true if such a label
 11434	   is found, false otherwise.  */
11435
11436static bool
11437contains_label_p (tree st)
11438{
11439 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11440}
11441
11442/* Fold a ternary expression of code CODE and type TYPE with operands
11443 OP0, OP1, and OP2. Return the folded expression if folding is
11444 successful. Otherwise, return NULL_TREE. */
11445
11446tree
11447fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11448{
11449 tree tem;
11450 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11451 enum tree_code_class kind = TREE_CODE_CLASS (code);
11452
11453 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11454 && TREE_CODE_LENGTH (code) == 3);
11455
11456 /* Strip any conversions that don't change the mode. This is safe
11457 for every expression, except for a comparison expression because
11458 its signedness is derived from its operands. So, in the latter
11459 case, only strip conversions that don't change the signedness.
11460
11461 Note that this is done as an internal manipulation within the
11462 constant folder, in order to find the simplest representation of
 11463	     the arguments so that their form can be studied.  In any case,
11464 the appropriate type conversions should be put back in the tree
11465 that will get out of the constant folder. */
11466 if (op0)
11467 {
11468 arg0 = op0;
11469 STRIP_NOPS (arg0);
11470 }
11471
11472 if (op1)
11473 {
11474 arg1 = op1;
11475 STRIP_NOPS (arg1);
11476 }
11477
11478 switch (code)
11479 {
11480 case COMPONENT_REF:
11481 if (TREE_CODE (arg0) == CONSTRUCTOR
11482 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11483 {
11484 unsigned HOST_WIDE_INT idx;
11485 tree field, value;
11486 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11487 if (field == arg1)
11488 return value;
11489 }
11490 return NULL_TREE;
11491
11492 case COND_EXPR:
11493 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11494 so all simple results must be passed through pedantic_non_lvalue. */
11495 if (TREE_CODE (arg0) == INTEGER_CST)
11496 {
11497 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11498 tem = integer_zerop (arg0) ? op2 : op1;
11499 /* Only optimize constant conditions when the selected branch
11500 has the same type as the COND_EXPR. This avoids optimizing
11501 away "c ? x : throw", where the throw has a void type.
 11502	     Also avoid discarding an operand that contains a label.  */
11503 if ((!TREE_SIDE_EFFECTS (unused_op)
11504 || !contains_label_p (unused_op))
11505 && (! VOID_TYPE_P (TREE_TYPE (tem))
11506 || VOID_TYPE_P (type)))
11507 return pedantic_non_lvalue (tem);
11508 return NULL_TREE;
11509 }
11510 if (operand_equal_p (arg1, op2, 0))
11511 return pedantic_omit_one_operand (type, arg1, arg0);
11512
11513 /* If we have A op B ? A : C, we may be able to convert this to a
11514 simpler expression, depending on the operation and the values
11515 of B and C. Signed zeros prevent all of these transformations,
11516 for reasons given above each one.
11517
11518 Also try swapping the arguments and inverting the conditional. */
11519 if (COMPARISON_CLASS_P (arg0)
11520 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11521 arg1, TREE_OPERAND (arg0, 1))
11522 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11523 {
11524 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11525 if (tem)
11526 return tem;
11527 }
11528
11529 if (COMPARISON_CLASS_P (arg0)
11530 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11531 op2,
11532 TREE_OPERAND (arg0, 1))
11533 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11534 {
11535 tem = fold_truth_not_expr (arg0);
11536 if (tem && COMPARISON_CLASS_P (tem))
11537 {
11538 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11539 if (tem)
11540 return tem;
11541 }
11542 }
11543
11544 /* If the second operand is simpler than the third, swap them
11545 since that produces better jump optimization results. */
11546 if (truth_value_p (TREE_CODE (arg0))
11547 && tree_swap_operands_p (op1, op2, false))
11548 {
11549 /* See if this can be inverted. If it can't, possibly because
11550 it was a floating-point inequality comparison, don't do
11551 anything. */
11552 tem = fold_truth_not_expr (arg0);
11553 if (tem)
11554 return fold_build3 (code, type, tem, op2, op1);
11555 }
11556
11557 /* Convert A ? 1 : 0 to simply A. */
11558 if (integer_onep (op1)
11559 && integer_zerop (op2)
11560 /* If we try to convert OP0 to our type, the
11561 call to fold will try to move the conversion inside
11562 a COND, which will recurse. In that case, the COND_EXPR
11563 is probably the best choice, so leave it alone. */
11564 && type == TREE_TYPE (arg0))
11565 return pedantic_non_lvalue (arg0);
11566
11567 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11568 over COND_EXPR in cases such as floating point comparisons. */
11569 if (integer_zerop (op1)
11570 && integer_onep (op2)
11571 && truth_value_p (TREE_CODE (arg0)))
11572 return pedantic_non_lvalue (fold_convert (type,
11573 invert_truthvalue (arg0)));
11574
11575 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11576 if (TREE_CODE (arg0) == LT_EXPR
11577 && integer_zerop (TREE_OPERAND (arg0, 1))
11578 && integer_zerop (op2)
11579 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11580 {
11581 /* sign_bit_p only checks ARG1 bits within A's precision.
11582 If <sign bit of A> has wider type than A, bits outside
11583 of A's precision in <sign bit of A> need to be checked.
 11584	     If they are all 0, this optimization must be done
 11585	     in A's unsigned type; if they are all 1, in A's signed type;
 11586	     otherwise it cannot be done.  */
11587 if (TYPE_PRECISION (TREE_TYPE (tem))
11588 < TYPE_PRECISION (TREE_TYPE (arg1))
11589 && TYPE_PRECISION (TREE_TYPE (tem))
11590 < TYPE_PRECISION (type))
11591 {
11592 unsigned HOST_WIDE_INT mask_lo;
11593 HOST_WIDE_INT mask_hi;
11594 int inner_width, outer_width;
11595 tree tem_type;
11596
11597 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11598 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11599 if (outer_width > TYPE_PRECISION (type))
11600 outer_width = TYPE_PRECISION (type);
11601
11602 if (outer_width > HOST_BITS_PER_WIDE_INT)
11603 {
11604 mask_hi = ((unsigned HOST_WIDE_INT) -1
11605 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11606 mask_lo = -1;
11607 }
11608 else
11609 {
11610 mask_hi = 0;
11611 mask_lo = ((unsigned HOST_WIDE_INT) -1
11612 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11613 }
11614 if (inner_width > HOST_BITS_PER_WIDE_INT)
11615 {
11616 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11617 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11618 mask_lo = 0;
11619 }
11620 else
11621 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11622 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11623
11624 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11625 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11626 {
11627 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11628 tem = fold_convert (tem_type, tem);
11629 }
11630 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11631 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11632 {
11633 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11634 tem = fold_convert (tem_type, tem);
11635 }
11636 else
11637 tem = NULL;
11638 }
11639
11640 if (tem)
11641 return fold_convert (type,
11642 fold_build2 (BIT_AND_EXPR,
11643 TREE_TYPE (tem), tem,
11644 fold_convert (TREE_TYPE (tem),
11645 arg1)));
11646 }
11647
11648 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11649 already handled above. */
11650 if (TREE_CODE (arg0) == BIT_AND_EXPR
11651 && integer_onep (TREE_OPERAND (arg0, 1))
11652 && integer_zerop (op2)
11653 && integer_pow2p (arg1))
11654 {
11655 tree tem = TREE_OPERAND (arg0, 0);
11656 STRIP_NOPS (tem);
11657 if (TREE_CODE (tem) == RSHIFT_EXPR
11658 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11659 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11660 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11661 return fold_build2 (BIT_AND_EXPR, type,
11662 TREE_OPERAND (tem, 0), arg1);
11663 }
11664
11665 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11666 is probably obsolete because the first operand should be a
11667 truth value (that's why we have the two cases above), but let's
11668 leave it in until we can confirm this for all front-ends. */
11669 if (integer_zerop (op2)
11670 && TREE_CODE (arg0) == NE_EXPR
11671 && integer_zerop (TREE_OPERAND (arg0, 1))
11672 && integer_pow2p (arg1)
11673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11674 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11675 arg1, OEP_ONLY_CONST))
11676 return pedantic_non_lvalue (fold_convert (type,
11677 TREE_OPERAND (arg0, 0)));
11678
11679 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11680 if (integer_zerop (op2)
11681 && truth_value_p (TREE_CODE (arg0))
11682 && truth_value_p (TREE_CODE (arg1)))
11683 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11684 fold_convert (type, arg0),
11685 arg1);
11686
11687 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11688 if (integer_onep (op2)
11689 && truth_value_p (TREE_CODE (arg0))
11690 && truth_value_p (TREE_CODE (arg1)))
11691 {
11692 /* Only perform transformation if ARG0 is easily inverted. */
11693 tem = fold_truth_not_expr (arg0);
11694 if (tem)
11695 return fold_build2 (TRUTH_ORIF_EXPR, type,
11696 fold_convert (type, tem),
11697 arg1);
11698 }
11699
11700 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11701 if (integer_zerop (arg1)
11702 && truth_value_p (TREE_CODE (arg0))
11703 && truth_value_p (TREE_CODE (op2)))
11704 {
11705 /* Only perform transformation if ARG0 is easily inverted. */
11706 tem = fold_truth_not_expr (arg0);
11707 if (tem)
11708 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11709 fold_convert (type, tem),
11710 op2);
11711 }
11712
11713 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11714 if (integer_onep (arg1)
11715 && truth_value_p (TREE_CODE (arg0))
11716 && truth_value_p (TREE_CODE (op2)))
11717 return fold_build2 (TRUTH_ORIF_EXPR, type,
11718 fold_convert (type, arg0),
11719 op2);
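      /* For illustration, the four conversions above: a ? b : 0 folds
	 to a && b, a ? b : 1 to !a || b, a ? 0 : b to !a && b, and
	 a ? 1 : b to a || b, whenever the operands are truth values.  */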
11720
11721 return NULL_TREE;
11722
11723 case CALL_EXPR:
11724 /* Check for a built-in function. */
11725 if (TREE_CODE (op0) == ADDR_EXPR
11726 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11727 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11728 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11729 return NULL_TREE;
11730
11731 case BIT_FIELD_REF:
11732 if (TREE_CODE (arg0) == VECTOR_CST
11733 && type == TREE_TYPE (TREE_TYPE (arg0))
11734 && host_integerp (arg1, 1)
11735 && host_integerp (op2, 1))
11736 {
11737 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11738 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11739
11740 if (width != 0
11741 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11742 && (idx % width) == 0
11743 && (idx = idx / width)
11744 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11745 {
11746 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11747 while (idx-- > 0 && elements)
11748 elements = TREE_CHAIN (elements);
11749 if (elements)
11750 return TREE_VALUE (elements);
11751 else
11752 return fold_convert (type, integer_zero_node);
11753 }
11754 }
11755 return NULL_TREE;
11756
11757 default:
11758 return NULL_TREE;
11759 } /* switch (code) */
11760}
11761
11762/* Perform constant folding and related simplification of EXPR.
11763 The related simplifications include x*1 => x, x*0 => 0, etc.,
11764 and application of the associative law.
11765 NOP_EXPR conversions may be removed freely (as long as we
11766 are careful not to change the type of the overall expression).
11767 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11768 but we can constant-fold them if they have constant operands. */
11769
11770#ifdef ENABLE_FOLD_CHECKING
11771# define fold(x) fold_1 (x)
11772static tree fold_1 (tree);
11773static
11774#endif
11775tree
11776fold (tree expr)
11777{
11778 const tree t = expr;
11779 enum tree_code code = TREE_CODE (t);
11780 enum tree_code_class kind = TREE_CODE_CLASS (code);
11781 tree tem;
11782
11783 /* Return right away if a constant. */
11784 if (kind == tcc_constant)
11785 return t;
11786
11787 if (IS_EXPR_CODE_CLASS (kind))
11788 {
11789 tree type = TREE_TYPE (t);
11790 tree op0, op1, op2;
11791
11792 switch (TREE_CODE_LENGTH (code))
11793 {
11794 case 1:
11795 op0 = TREE_OPERAND (t, 0);
11796 tem = fold_unary (code, type, op0);
11797 return tem ? tem : expr;
11798 case 2:
11799 op0 = TREE_OPERAND (t, 0);
11800 op1 = TREE_OPERAND (t, 1);
11801 tem = fold_binary (code, type, op0, op1);
11802 return tem ? tem : expr;
11803 case 3:
11804 op0 = TREE_OPERAND (t, 0);
11805 op1 = TREE_OPERAND (t, 1);
11806 op2 = TREE_OPERAND (t, 2);
11807 tem = fold_ternary (code, type, op0, op1, op2);
11808 return tem ? tem : expr;
11809 default:
11810 break;
11811 }
11812 }
11813
11814 switch (code)
11815 {
11816 case CONST_DECL:
11817 return fold (DECL_INITIAL (t));
11818
11819 default:
11820 return t;
11821 } /* switch (code) */
11822}
11823
11824#ifdef ENABLE_FOLD_CHECKING
11825#undef fold
11826
11827static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11828static void fold_check_failed (tree, tree);
11829void print_fold_checksum (tree);
11830
 11831	/* When configured with --enable-checking=fold, compute a digest of EXPR
 11832	   before and after the actual fold call, to verify that fold did not
 11833	   accidentally change the original EXPR.  */
11834
11835tree
11836fold (tree expr)
11837{
11838 tree ret;
11839 struct md5_ctx ctx;
11840 unsigned char checksum_before[16], checksum_after[16];
11841 htab_t ht;
11842
11843 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11844 md5_init_ctx (&ctx);
11845 fold_checksum_tree (expr, &ctx, ht);
11846 md5_finish_ctx (&ctx, checksum_before);
11847 htab_empty (ht);
11848
11849 ret = fold_1 (expr);
11850
11851 md5_init_ctx (&ctx);
11852 fold_checksum_tree (expr, &ctx, ht);
11853 md5_finish_ctx (&ctx, checksum_after);
11854 htab_delete (ht);
11855
11856 if (memcmp (checksum_before, checksum_after, 16))
11857 fold_check_failed (expr, ret);
11858
11859 return ret;
11860}
11861
11862void
11863print_fold_checksum (tree expr)
11864{
11865 struct md5_ctx ctx;
11866 unsigned char checksum[16], cnt;
11867 htab_t ht;
11868
11869 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11870 md5_init_ctx (&ctx);
11871 fold_checksum_tree (expr, &ctx, ht);
11872 md5_finish_ctx (&ctx, checksum);
11873 htab_delete (ht);
11874 for (cnt = 0; cnt < 16; ++cnt)
11875 fprintf (stderr, "%02x", checksum[cnt]);
11876 putc ('\n', stderr);
11877}
11878
11879static void
11880fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11881{
11882 internal_error ("fold check: original tree changed by fold");
11883}
11884
11885static void
11886fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11887{
11888 void **slot;
11889 enum tree_code code;
11890 struct tree_function_decl buf;
11891 int i, len;
11892
11893recursive_label:
11894
11895 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11896 <= sizeof (struct tree_function_decl))
11897 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11898 if (expr == NULL)
11899 return;
11900 slot = htab_find_slot (ht, expr, INSERT);
11901 if (*slot != NULL)
11902 return;
11903 *slot = expr;
11904 code = TREE_CODE (expr);
11905 if (TREE_CODE_CLASS (code) == tcc_declaration
11906 && DECL_ASSEMBLER_NAME_SET_P (expr))
11907 {
11908 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11909 memcpy ((char *) &buf, expr, tree_size (expr));
11910 expr = (tree) &buf;
11911 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11912 }
11913 else if (TREE_CODE_CLASS (code) == tcc_type
11914 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11915 || TYPE_CACHED_VALUES_P (expr)
11916 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11917 {
11918 /* Allow these fields to be modified. */
11919 memcpy ((char *) &buf, expr, tree_size (expr));
11920 expr = (tree) &buf;
11921 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11922 TYPE_POINTER_TO (expr) = NULL;
11923 TYPE_REFERENCE_TO (expr) = NULL;
11924 if (TYPE_CACHED_VALUES_P (expr))
11925 {
11926 TYPE_CACHED_VALUES_P (expr) = 0;
11927 TYPE_CACHED_VALUES (expr) = NULL;
11928 }
11929 }
11930 md5_process_bytes (expr, tree_size (expr), ctx);
11931 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11932 if (TREE_CODE_CLASS (code) != tcc_type
11933 && TREE_CODE_CLASS (code) != tcc_declaration
11934 && code != TREE_LIST)
11935 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11936 switch (TREE_CODE_CLASS (code))
11937 {
11938 case tcc_constant:
11939 switch (code)
11940 {
11941 case STRING_CST:
11942 md5_process_bytes (TREE_STRING_POINTER (expr),
11943 TREE_STRING_LENGTH (expr), ctx);
11944 break;
11945 case COMPLEX_CST:
11946 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11947 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11948 break;
11949 case VECTOR_CST:
11950 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11951 break;
11952 default:
11953 break;
11954 }
11955 break;
11956 case tcc_exceptional:
11957 switch (code)
11958 {
11959 case TREE_LIST:
11960 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11961 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11962 expr = TREE_CHAIN (expr);
11963 goto recursive_label;
11964 break;
11965 case TREE_VEC:
11966 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11967 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11968 break;
11969 default:
11970 break;
11971 }
11972 break;
11973 case tcc_expression:
11974 case tcc_reference:
11975 case tcc_comparison:
11976 case tcc_unary:
11977 case tcc_binary:
11978 case tcc_statement:
11979 len = TREE_CODE_LENGTH (code);
11980 for (i = 0; i < len; ++i)
11981 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11982 break;
11983 case tcc_declaration:
11984 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11985 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11986 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11987 {
11988 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11989 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11990 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11991 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11992 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11993 }
11994 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11995 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11996
11997 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11998 {
11999 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12000 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12001 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12002 }
12003 break;
12004 case tcc_type:
12005 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12006 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12007 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12008 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12009 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12010 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12011 if (INTEGRAL_TYPE_P (expr)
12012 || SCALAR_FLOAT_TYPE_P (expr))
12013 {
12014 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12015 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12016 }
12017 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12018 if (TREE_CODE (expr) == RECORD_TYPE
12019 || TREE_CODE (expr) == UNION_TYPE
12020 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12021 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12022 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12023 break;
12024 default:
12025 break;
12026 }
12027}
12028
12029#endif
12030
12031/* Fold a unary tree expression with code CODE of type TYPE with an
12032 operand OP0. Return a folded expression if successful. Otherwise,
12033 return a tree expression with code CODE of type TYPE with an
12034 operand OP0. */
12035
12036tree
12037fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12038{
12039 tree tem;
12040#ifdef ENABLE_FOLD_CHECKING
12041 unsigned char checksum_before[16], checksum_after[16];
12042 struct md5_ctx ctx;
12043 htab_t ht;
12044
12045 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12046 md5_init_ctx (&ctx);
12047 fold_checksum_tree (op0, &ctx, ht);
12048 md5_finish_ctx (&ctx, checksum_before);
12049 htab_empty (ht);
12050#endif
12051
12052 tem = fold_unary (code, type, op0);
12053 if (!tem)
12054 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12055
12056#ifdef ENABLE_FOLD_CHECKING
12057 md5_init_ctx (&ctx);
12058 fold_checksum_tree (op0, &ctx, ht);
12059 md5_finish_ctx (&ctx, checksum_after);
12060 htab_delete (ht);
12061
12062 if (memcmp (checksum_before, checksum_after, 16))
12063 fold_check_failed (op0, tem);
12064#endif
12065 return tem;
12066}
12067
12068/* Fold a binary tree expression with code CODE of type TYPE with
12069 operands OP0 and OP1. Return a folded expression if successful.
12070 Otherwise, return a tree expression with code CODE of type TYPE
12071 with operands OP0 and OP1. */
12072
12073tree
12074fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12075 MEM_STAT_DECL)
12076{
12077 tree tem;
12078#ifdef ENABLE_FOLD_CHECKING
12079 unsigned char checksum_before_op0[16],
12080 checksum_before_op1[16],
12081 checksum_after_op0[16],
12082 checksum_after_op1[16];
12083 struct md5_ctx ctx;
12084 htab_t ht;
12085
12086 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12087 md5_init_ctx (&ctx);
12088 fold_checksum_tree (op0, &ctx, ht);
12089 md5_finish_ctx (&ctx, checksum_before_op0);
12090 htab_empty (ht);
12091
12092 md5_init_ctx (&ctx);
12093 fold_checksum_tree (op1, &ctx, ht);
12094 md5_finish_ctx (&ctx, checksum_before_op1);
12095 htab_empty (ht);
12096#endif
12097
12098 tem = fold_binary (code, type, op0, op1);
12099 if (!tem)
12100 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12101
12102#ifdef ENABLE_FOLD_CHECKING
12103 md5_init_ctx (&ctx);
12104 fold_checksum_tree (op0, &ctx, ht);
12105 md5_finish_ctx (&ctx, checksum_after_op0);
12106 htab_empty (ht);
12107
12108 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12109 fold_check_failed (op0, tem);
12110
12111 md5_init_ctx (&ctx);
12112 fold_checksum_tree (op1, &ctx, ht);
12113 md5_finish_ctx (&ctx, checksum_after_op1);
12114 htab_delete (ht);
12115
12116 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12117 fold_check_failed (op1, tem);
12118#endif
12119 return tem;
12120}
12121
12122/* Fold a ternary tree expression with code CODE of type TYPE with
12123 operands OP0, OP1, and OP2. Return a folded expression if
12124 successful. Otherwise, return a tree expression with code CODE of
12125 type TYPE with operands OP0, OP1, and OP2. */
12126
12127tree
12128fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12129 MEM_STAT_DECL)
12130{
12131 tree tem;
12132#ifdef ENABLE_FOLD_CHECKING
12133 unsigned char checksum_before_op0[16],
12134 checksum_before_op1[16],
12135 checksum_before_op2[16],
12136 checksum_after_op0[16],
12137 checksum_after_op1[16],
12138 checksum_after_op2[16];
12139 struct md5_ctx ctx;
12140 htab_t ht;
12141
12142 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12143 md5_init_ctx (&ctx);
12144 fold_checksum_tree (op0, &ctx, ht);
12145 md5_finish_ctx (&ctx, checksum_before_op0);
12146 htab_empty (ht);
12147
12148 md5_init_ctx (&ctx);
12149 fold_checksum_tree (op1, &ctx, ht);
12150 md5_finish_ctx (&ctx, checksum_before_op1);
12151 htab_empty (ht);
12152
12153 md5_init_ctx (&ctx);
12154 fold_checksum_tree (op2, &ctx, ht);
12155 md5_finish_ctx (&ctx, checksum_before_op2);
12156 htab_empty (ht);
12157#endif
12158
12159 tem = fold_ternary (code, type, op0, op1, op2);
12160 if (!tem)
12161 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12162
12163#ifdef ENABLE_FOLD_CHECKING
12164 md5_init_ctx (&ctx);
12165 fold_checksum_tree (op0, &ctx, ht);
12166 md5_finish_ctx (&ctx, checksum_after_op0);
12167 htab_empty (ht);
12168
12169 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12170 fold_check_failed (op0, tem);
12171
12172 md5_init_ctx (&ctx);
12173 fold_checksum_tree (op1, &ctx, ht);
12174 md5_finish_ctx (&ctx, checksum_after_op1);
12175 htab_empty (ht);
12176
12177 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12178 fold_check_failed (op1, tem);
12179
12180 md5_init_ctx (&ctx);
12181 fold_checksum_tree (op2, &ctx, ht);
12182 md5_finish_ctx (&ctx, checksum_after_op2);
12183 htab_delete (ht);
12184
12185 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12186 fold_check_failed (op2, tem);
12187#endif
12188 return tem;
12189}
12190
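
/* Usage sketch for the fold_buildN entry points above: they always
   yield a tree, folded when a simplification applies and freshly
   built otherwise.  For instance (via the fold_build2 wrapper macro
   around fold_build2_stat):

     tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (arg), arg,
			     build_int_cst (TREE_TYPE (arg), 0));

   simply yields ARG for integral ARG, since x + 0 folds to x.  */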
12191/* Perform constant folding and related simplification of initializer
12192 expression EXPR. These behave identically to "fold_buildN" but ignore
12193 potential run-time traps and exceptions that fold must preserve. */
12194
12195#define START_FOLD_INIT \
12196 int saved_signaling_nans = flag_signaling_nans;\
12197 int saved_trapping_math = flag_trapping_math;\
12198 int saved_rounding_math = flag_rounding_math;\
12199 int saved_trapv = flag_trapv;\
12200 int saved_folding_initializer = folding_initializer;\
12201 flag_signaling_nans = 0;\
12202 flag_trapping_math = 0;\
12203 flag_rounding_math = 0;\
12204 flag_trapv = 0;\
12205 folding_initializer = 1;
12206
12207#define END_FOLD_INIT \
12208 flag_signaling_nans = saved_signaling_nans;\
12209 flag_trapping_math = saved_trapping_math;\
12210 flag_rounding_math = saved_rounding_math;\
12211 flag_trapv = saved_trapv;\
12212 folding_initializer = saved_folding_initializer;
12213
12214tree
12215fold_build1_initializer (enum tree_code code, tree type, tree op)
12216{
12217 tree result;
12218 START_FOLD_INIT;
12219
12220 result = fold_build1 (code, type, op);
12221
12222 END_FOLD_INIT;
12223 return result;
12224}
12225
12226tree
12227fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12228{
12229 tree result;
12230 START_FOLD_INIT;
12231
12232 result = fold_build2 (code, type, op0, op1);
12233
12234 END_FOLD_INIT;
12235 return result;
12236}
12237
12238tree
12239fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12240 tree op2)
12241{
12242 tree result;
12243 START_FOLD_INIT;
12244
12245 result = fold_build3 (code, type, op0, op1, op2);
12246
12247 END_FOLD_INIT;
12248 return result;
12249}
12250
12251#undef START_FOLD_INIT
12252#undef END_FOLD_INIT
12253
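
/* The START_FOLD_INIT/END_FOLD_INIT pair above is a scoped
   save/override/restore of global behavior flags.  The same pattern
   in miniature (the flag and callback below are hypothetical, not
   GCC API):  */

#if 0
static int toy_flag_trapping_math = 1;

static long
call_with_trapping_cleared (long (*fn) (long), long arg)
{
  int saved = toy_flag_trapping_math;	/* save */
  long result;

  toy_flag_trapping_math = 0;		/* override */
  result = fn (arg);
  toy_flag_trapping_math = saved;	/* restore */
  return result;
}
#endif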
12254/* Determine if the first argument is a multiple of the second argument.
12255   Return 0 if it is not, or if we cannot easily determine it to be.
12256
12257 An example of the sort of thing we care about (at this point; this routine
12258 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12259 fold cases do now) is discovering that
12260
12261 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12262
12263 is a multiple of
12264
12265 SAVE_EXPR (J * 8)
12266
12267 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12268
12269 This code also handles discovering that
12270
12271 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12272
12273 is a multiple of 8 so we don't have to worry about dealing with a
12274 possible remainder.
12275
12276 Note that we *look* inside a SAVE_EXPR only to determine how it was
12277 calculated; it is not safe for fold to do much of anything else with the
12278 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12279 at run time. For example, the latter example above *cannot* be implemented
12280 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12281 evaluation time of the original SAVE_EXPR is not necessarily the same at
12282 the time the new expression is evaluated. The only optimization of this
12283 sort that would be valid is changing
12284
12285 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12286
12287 divided by 8 to
12288
12289 SAVE_EXPR (I) * SAVE_EXPR (J)
12290
12291 (where the same SAVE_EXPR (J) is used in the original and the
12292 transformed version). */
12293
12294static int
12295multiple_of_p (tree type, tree top, tree bottom)
12296{
12297 if (operand_equal_p (top, bottom, 0))
12298 return 1;
12299
12300 if (TREE_CODE (type) != INTEGER_TYPE)
12301 return 0;
12302
12303 switch (TREE_CODE (top))
12304 {
12305 case BIT_AND_EXPR:
12306      /* Bitwise AND: when BOTTOM is a power of two, TOP is a multiple
12307	 of BOTTOM if either operand (in particular the mask) is.  */
12308 if (!integer_pow2p (bottom))
12309 return 0;
12310 /* FALLTHRU */
12311
12312 case MULT_EXPR:
12313 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12314 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12315
12316 case PLUS_EXPR:
12317 case MINUS_EXPR:
12318 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12319 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12320
12321 case LSHIFT_EXPR:
12322 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12323 {
12324 tree op1, t1;
12325
12326 op1 = TREE_OPERAND (top, 1);
12327 /* const_binop may not detect overflow correctly,
12328 so check for it explicitly here. */
12329 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12330 > TREE_INT_CST_LOW (op1)
12331 && TREE_INT_CST_HIGH (op1) == 0
12332 && 0 != (t1 = fold_convert (type,
12333 const_binop (LSHIFT_EXPR,
12334 size_one_node,
12335 op1, 0)))
12336 && ! TREE_OVERFLOW (t1))
12337 return multiple_of_p (type, t1, bottom);
12338 }
12339 return 0;
12340
12341 case NOP_EXPR:
12342 /* Can't handle conversions from non-integral or wider integral type. */
12343 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12344 || (TYPE_PRECISION (type)
12345 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12346 return 0;
12347
12348      /* ... fall through ...  */
12349
12350 case SAVE_EXPR:
12351 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12352
12353 case INTEGER_CST:
12354 if (TREE_CODE (bottom) != INTEGER_CST
12355 || (TYPE_UNSIGNED (type)
12356 && (tree_int_cst_sgn (top) < 0
12357 || tree_int_cst_sgn (bottom) < 0)))
12358 return 0;
12359 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12360 top, bottom, 0));
12361
12362 default:
12363 return 0;
12364 }
12365}
12366
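
/* A minimal stand-alone model of the recursion above (illustrative
   only, not GCC API): expressions built from constants, sums and
   products, checked for divisibility by a positive constant BOTTOM.
   It mirrors the MULT_EXPR/PLUS_EXPR rules: a product is a multiple
   if either factor is, a sum only if both addends are.  */

#if 0
struct toy_expr
{
  enum { TOY_CST, TOY_PLUS, TOY_MULT } kind;
  long cst;				/* valid for TOY_CST */
  struct toy_expr *op0, *op1;		/* valid for TOY_PLUS, TOY_MULT */
};

static int
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->kind)
    {
    case TOY_CST:
      return top->cst % bottom == 0;
    case TOY_MULT:
      return (toy_multiple_of_p (top->op0, bottom)
	      || toy_multiple_of_p (top->op1, bottom));
    case TOY_PLUS:
      return (toy_multiple_of_p (top->op0, bottom)
	      && toy_multiple_of_p (top->op1, bottom));
    }
  return 0;
}
#endif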
12367/* Return true if `t' is known to be non-negative. If the return
12368 value is based on the assumption that signed overflow is undefined,
12369 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12370 *STRICT_OVERFLOW_P. */
12371
12372int
12373tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12374{
12375 if (t == error_mark_node)
12376 return 0;
12377
12378 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12379 return 1;
12380
12381 switch (TREE_CODE (t))
12382 {
12383 case SSA_NAME:
12384 /* Query VRP to see if it has recorded any information about
12385 the range of this object. */
12386 return ssa_name_nonnegative_p (t);
12387
12388 case ABS_EXPR:
12389 /* We can't return 1 if flag_wrapv is set because
12390 ABS_EXPR<INT_MIN> = INT_MIN. */
12391 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12392 return 1;
12393 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12394 {
12395 *strict_overflow_p = true;
12396 return 1;
12397 }
12398 break;
12399
12400 case INTEGER_CST:
12401 return tree_int_cst_sgn (t) >= 0;
12402
12403 case REAL_CST:
12404 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12405
12406 case PLUS_EXPR:
12407 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12408 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12409 strict_overflow_p)
12410 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12411 strict_overflow_p));
12412
12413 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12414 both unsigned and at least 2 bits shorter than the result. */
12415 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12416 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12417 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12418 {
12419 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12420 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12421 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12422 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12423 {
12424 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12425 TYPE_PRECISION (inner2)) + 1;
12426 return prec < TYPE_PRECISION (TREE_TYPE (t));
12427 }
12428 }
12429 break;
12430
12431 case MULT_EXPR:
12432 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12433 {
12434 /* x * x for floating point x is always non-negative. */
12435 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12436 return 1;
12437 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12438 strict_overflow_p)
12439 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12440 strict_overflow_p));
12441 }
12442
12443      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12444	 both unsigned and their combined precision is less than the result's.  */
12445 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12446 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12447 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12448 {
12449 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12450 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12451 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12452 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12453 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12454 < TYPE_PRECISION (TREE_TYPE (t));
12455 }
12456 return 0;
12457
12458 case BIT_AND_EXPR:
12459 case MAX_EXPR:
12460 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12461 strict_overflow_p)
12462 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12463 strict_overflow_p));
12464
12465 case BIT_IOR_EXPR:
12466 case BIT_XOR_EXPR:
12467 case MIN_EXPR:
12468 case RDIV_EXPR:
12469 case TRUNC_DIV_EXPR:
12470 case CEIL_DIV_EXPR:
12471 case FLOOR_DIV_EXPR:
12472 case ROUND_DIV_EXPR:
12473 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12474 strict_overflow_p)
12475 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12476 strict_overflow_p));
12477
12478 case TRUNC_MOD_EXPR:
12479 case CEIL_MOD_EXPR:
12480 case FLOOR_MOD_EXPR:
12481 case ROUND_MOD_EXPR:
12482 case SAVE_EXPR:
12483 case NON_LVALUE_EXPR:
12484 case FLOAT_EXPR:
12485 case FIX_TRUNC_EXPR:
12486 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12487 strict_overflow_p);
12488
12489 case COMPOUND_EXPR:
12490 case MODIFY_EXPR:
12491 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12492 strict_overflow_p);
12493
12494 case BIND_EXPR:
12495 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12496 strict_overflow_p);
12497
12498 case COND_EXPR:
12499 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12500 strict_overflow_p)
12501 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12502 strict_overflow_p));
12503
12504 case NOP_EXPR:
12505 {
12506 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12507 tree outer_type = TREE_TYPE (t);
12508
12509 if (TREE_CODE (outer_type) == REAL_TYPE)
12510 {
12511 if (TREE_CODE (inner_type) == REAL_TYPE)
12512 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12513 strict_overflow_p);
12514 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12515 {
12516 if (TYPE_UNSIGNED (inner_type))
12517 return 1;
12518 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12519 strict_overflow_p);
12520 }
12521 }
12522 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12523 {
12524 if (TREE_CODE (inner_type) == REAL_TYPE)
12525 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12526 strict_overflow_p);
12527 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12528 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12529 && TYPE_UNSIGNED (inner_type);
12530 }
12531 }
12532 break;
12533
12534 case TARGET_EXPR:
12535 {
12536 tree temp = TARGET_EXPR_SLOT (t);
12537 t = TARGET_EXPR_INITIAL (t);
12538
12539 /* If the initializer is non-void, then it's a normal expression
12540 that will be assigned to the slot. */
12541 if (!VOID_TYPE_P (t))
12542 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12543
12544 /* Otherwise, the initializer sets the slot in some way. One common
12545 way is an assignment statement at the end of the initializer. */
12546 while (1)
12547 {
12548 if (TREE_CODE (t) == BIND_EXPR)
12549 t = expr_last (BIND_EXPR_BODY (t));
12550 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12551 || TREE_CODE (t) == TRY_CATCH_EXPR)
12552 t = expr_last (TREE_OPERAND (t, 0));
12553 else if (TREE_CODE (t) == STATEMENT_LIST)
12554 t = expr_last (t);
12555 else
12556 break;
12557 }
12558 if (TREE_CODE (t) == MODIFY_EXPR
12559 && TREE_OPERAND (t, 0) == temp)
12560 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12561 strict_overflow_p);
12562
12563 return 0;
12564 }
12565
12566 case CALL_EXPR:
12567 {
12568 tree fndecl = get_callee_fndecl (t);
12569 tree arglist = TREE_OPERAND (t, 1);
12570 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12571 switch (DECL_FUNCTION_CODE (fndecl))
12572 {
12573 CASE_FLT_FN (BUILT_IN_ACOS):
12574 CASE_FLT_FN (BUILT_IN_ACOSH):
12575 CASE_FLT_FN (BUILT_IN_CABS):
12576 CASE_FLT_FN (BUILT_IN_COSH):
12577 CASE_FLT_FN (BUILT_IN_ERFC):
12578 CASE_FLT_FN (BUILT_IN_EXP):
12579 CASE_FLT_FN (BUILT_IN_EXP10):
12580 CASE_FLT_FN (BUILT_IN_EXP2):
12581 CASE_FLT_FN (BUILT_IN_FABS):
12582 CASE_FLT_FN (BUILT_IN_FDIM):
12583 CASE_FLT_FN (BUILT_IN_HYPOT):
12584 CASE_FLT_FN (BUILT_IN_POW10):
12585 CASE_INT_FN (BUILT_IN_FFS):
12586 CASE_INT_FN (BUILT_IN_PARITY):
12587 CASE_INT_FN (BUILT_IN_POPCOUNT):
12588 /* Always true. */
12589 return 1;
12590
12591 CASE_FLT_FN (BUILT_IN_SQRT):
12592 /* sqrt(-0.0) is -0.0. */
12593 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12594 return 1;
12595 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12596 strict_overflow_p);
12597
12598 CASE_FLT_FN (BUILT_IN_ASINH):
12599 CASE_FLT_FN (BUILT_IN_ATAN):
12600 CASE_FLT_FN (BUILT_IN_ATANH):
12601 CASE_FLT_FN (BUILT_IN_CBRT):
12602 CASE_FLT_FN (BUILT_IN_CEIL):
12603 CASE_FLT_FN (BUILT_IN_ERF):
12604 CASE_FLT_FN (BUILT_IN_EXPM1):
12605 CASE_FLT_FN (BUILT_IN_FLOOR):
12606 CASE_FLT_FN (BUILT_IN_FMOD):
12607 CASE_FLT_FN (BUILT_IN_FREXP):
12608 CASE_FLT_FN (BUILT_IN_LCEIL):
12609 CASE_FLT_FN (BUILT_IN_LDEXP):
12610 CASE_FLT_FN (BUILT_IN_LFLOOR):
12611 CASE_FLT_FN (BUILT_IN_LLCEIL):
12612 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12613 CASE_FLT_FN (BUILT_IN_LLRINT):
12614 CASE_FLT_FN (BUILT_IN_LLROUND):
12615 CASE_FLT_FN (BUILT_IN_LRINT):
12616 CASE_FLT_FN (BUILT_IN_LROUND):
12617 CASE_FLT_FN (BUILT_IN_MODF):
12618 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12619 CASE_FLT_FN (BUILT_IN_POW):
12620 CASE_FLT_FN (BUILT_IN_RINT):
12621 CASE_FLT_FN (BUILT_IN_ROUND):
12622 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12623 CASE_FLT_FN (BUILT_IN_SINH):
12624 CASE_FLT_FN (BUILT_IN_TANH):
12625 CASE_FLT_FN (BUILT_IN_TRUNC):
12626 /* True if the 1st argument is nonnegative. */
12627 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12628 strict_overflow_p);
12629
12630 CASE_FLT_FN (BUILT_IN_FMAX):
12631 /* True if the 1st OR 2nd arguments are nonnegative. */
12632 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12633 strict_overflow_p)
12634 || (tree_expr_nonnegative_warnv_p
12635 (TREE_VALUE (TREE_CHAIN (arglist)),
12636 strict_overflow_p)));
12637
12638 CASE_FLT_FN (BUILT_IN_FMIN):
12639 /* True if the 1st AND 2nd arguments are nonnegative. */
12640 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12641 strict_overflow_p)
12642 && (tree_expr_nonnegative_warnv_p
12643 (TREE_VALUE (TREE_CHAIN (arglist)),
12644 strict_overflow_p)));
12645
12646 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12647 /* True if the 2nd argument is nonnegative. */
12648 return (tree_expr_nonnegative_warnv_p
12649 (TREE_VALUE (TREE_CHAIN (arglist)),
12650 strict_overflow_p));
12651
12652 default:
12653 break;
12654 }
12655 }
12656
12657 /* ... fall through ... */
12658
12659 default:
12660 {
12661 tree type = TREE_TYPE (t);
12662 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12663 && truth_value_p (TREE_CODE (t)))
12664 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12665	   have a signed:1 type (where the values are -1 and 0).  */
12666 return true;
12667 }
12668 }
12669
12670  /* We don't know the sign of `t', so be conservative and return false.  */
12671 return 0;
12672}
12673
12674/* Return true if `t' is known to be non-negative. Handle warnings
12675 about undefined signed overflow. */
12676
12677int
12678tree_expr_nonnegative_p (tree t)
12679{
12680 int ret;
12681 bool strict_overflow_p;
12682
12683 strict_overflow_p = false;
12684 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12685 if (strict_overflow_p)
12686 fold_overflow_warning (("assuming signed overflow does not occur when "
12687 "determining that expression is always "
12688 "non-negative"),
12689 WARN_STRICT_OVERFLOW_MISC);
12690 return ret;
12691}
12692
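
/* Worked instance of the zero-extension rules above: with 8-bit
   unsigned X and Y zero-extended to 32-bit int,

     (int) X + (int) Y <= 255 + 255 == 510,	needing
     MAX (8, 8) + 1 == 9 < 32 bits, and

     (int) X * (int) Y <= 255 * 255 == 65025,	needing
     8 + 8 == 16 < 32 bits,

   so both results are provably non-negative.  */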
12693/* Return true when T is an address and is known to be nonzero.
12694 For floating point we further ensure that T is not denormal.
12695   Similar logic is present in nonzero_address in rtlanal.c.
12696
12697 If the return value is based on the assumption that signed overflow
12698 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12699 change *STRICT_OVERFLOW_P. */
12700
12701bool
12702tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12703{
12704 tree type = TREE_TYPE (t);
12705 bool sub_strict_overflow_p;
12706
12707 /* Doing something useful for floating point would need more work. */
12708 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12709 return false;
12710
12711 switch (TREE_CODE (t))
12712 {
12713 case SSA_NAME:
12714 /* Query VRP to see if it has recorded any information about
12715 the range of this object. */
12716 return ssa_name_nonzero_p (t);
12717
12718 case ABS_EXPR:
12719 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12720 strict_overflow_p);
12721
12722 case INTEGER_CST:
12723 /* We used to test for !integer_zerop here. This does not work correctly
12724	 if TREE_CONSTANT_OVERFLOW (t) is set.  */
12725 return (TREE_INT_CST_LOW (t) != 0
12726 || TREE_INT_CST_HIGH (t) != 0);
12727
12728 case PLUS_EXPR:
12729 if (TYPE_OVERFLOW_UNDEFINED (type))
12730 {
12731	  /* In the presence of negative values it is hard
12732	     to say anything definite.  */
12733 sub_strict_overflow_p = false;
12734 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12735 &sub_strict_overflow_p)
12736 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12737 &sub_strict_overflow_p))
12738 return false;
12739	  /* One of the operands must be positive and the other non-negative.  */
12740 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12741 overflows, on a twos-complement machine the sum of two
12742 nonnegative numbers can never be zero. */
12743 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12744 strict_overflow_p)
12745 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12746 strict_overflow_p));
12747 }
12748 break;
12749
12750 case MULT_EXPR:
12751 if (TYPE_OVERFLOW_UNDEFINED (type))
12752 {
12753 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12754 strict_overflow_p)
12755 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12756 strict_overflow_p))
12757 {
12758 *strict_overflow_p = true;
12759 return true;
12760 }
12761 }
12762 break;
12763
12764 case NOP_EXPR:
12765 {
12766 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12767 tree outer_type = TREE_TYPE (t);
12768
12769 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12770 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12771 strict_overflow_p));
12772 }
12773 break;
12774
12775 case ADDR_EXPR:
12776 {
12777 tree base = get_base_address (TREE_OPERAND (t, 0));
12778
12779 if (!base)
12780 return false;
12781
12782 /* Weak declarations may link to NULL. */
12783 if (VAR_OR_FUNCTION_DECL_P (base))
12784 return !DECL_WEAK (base);
12785
12786 /* Constants are never weak. */
12787 if (CONSTANT_CLASS_P (base))
12788 return true;
12789
12790 return false;
12791 }
12792
12793 case COND_EXPR:
12794 sub_strict_overflow_p = false;
12795 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12796 &sub_strict_overflow_p)
12797 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12798 &sub_strict_overflow_p))
12799 {
12800 if (sub_strict_overflow_p)
12801 *strict_overflow_p = true;
12802 return true;
12803 }
12804 break;
12805
12806 case MIN_EXPR:
12807 sub_strict_overflow_p = false;
12808 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12809 &sub_strict_overflow_p)
12810 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12811 &sub_strict_overflow_p))
12812 {
12813 if (sub_strict_overflow_p)
12814 *strict_overflow_p = true;
12815 }
12816 break;
12817
12818 case MAX_EXPR:
12819 sub_strict_overflow_p = false;
12820 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12821 &sub_strict_overflow_p))
12822 {
12823 if (sub_strict_overflow_p)
12824 *strict_overflow_p = true;
12825
12826	    /* If both operands are nonzero, then MAX must be too.  */
12827 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12828 strict_overflow_p))
12829 return true;
12830
12831 /* MAX where operand 0 is positive is positive. */
12832 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12833 strict_overflow_p);
12834 }
12835 /* MAX where operand 1 is positive is positive. */
12836 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12837 &sub_strict_overflow_p)
12838 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12839 &sub_strict_overflow_p))
12840 {
12841 if (sub_strict_overflow_p)
12842 *strict_overflow_p = true;
12843 return true;
12844 }
12845 break;
12846
12847 case COMPOUND_EXPR:
12848 case MODIFY_EXPR:
12849 case BIND_EXPR:
12850 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12851 strict_overflow_p);
12852
12853 case SAVE_EXPR:
12854 case NON_LVALUE_EXPR:
12855 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12856 strict_overflow_p);
12857
12858 case BIT_IOR_EXPR:
12859 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12860 strict_overflow_p)
12861 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12862 strict_overflow_p));
12863
12864 case CALL_EXPR:
12865 return alloca_call_p (t);
12866
12867 default:
12868 break;
12869 }
12870 return false;
12871}
12872
12873/* Return true when T is an address and is known to be nonzero.
12874 Handle warnings about undefined signed overflow. */
12875
12876bool
12877tree_expr_nonzero_p (tree t)
12878{
12879 bool ret, strict_overflow_p;
12880
12881 strict_overflow_p = false;
12882 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12883 if (strict_overflow_p)
12884 fold_overflow_warning (("assuming signed overflow does not occur when "
12885 "determining that expression is always "
12886 "non-zero"),
12887 WARN_STRICT_OVERFLOW_MISC);
12888 return ret;
12889}
12890
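
/* Example for the ADDR_EXPR case above: the address of a weak symbol
   may legitimately be null when the symbol stays undefined, so it must
   not be folded to nonzero:

     extern int maybe_absent __attribute__ ((weak));

     if (&maybe_absent)	   -- must be kept as a run-time test
     if (&ordinary_decl)   -- may be folded to true  */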
12891/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12892 attempt to fold the expression to a constant without modifying TYPE,
12893 OP0 or OP1.
12894
12895 If the expression could be simplified to a constant, then return
12896 the constant. If the expression would not be simplified to a
12897 constant, then return NULL_TREE. */
12898
12899tree
12900fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12901{
12902 tree tem = fold_binary (code, type, op0, op1);
12903 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12904}
12905
12906/* Given the components of a unary expression CODE, TYPE and OP0,
12907 attempt to fold the expression to a constant without modifying
12908 TYPE or OP0.
12909
12910 If the expression could be simplified to a constant, then return
12911 the constant. If the expression would not be simplified to a
12912 constant, then return NULL_TREE. */
12913
12914tree
12915fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12916{
12917 tree tem = fold_unary (code, type, op0);
12918 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12919}
12920
12921/* If EXP represents referencing an element in a constant string
12922 (either via pointer arithmetic or array indexing), return the
12923 tree representing the value accessed, otherwise return NULL. */
12924
12925tree
12926fold_read_from_constant_string (tree exp)
12927{
12928 if ((TREE_CODE (exp) == INDIRECT_REF
12929 || TREE_CODE (exp) == ARRAY_REF)
12930 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12931 {
12932 tree exp1 = TREE_OPERAND (exp, 0);
12933 tree index;
12934 tree string;
12935
12936 if (TREE_CODE (exp) == INDIRECT_REF)
12937 string = string_constant (exp1, &index);
12938 else
12939 {
12940 tree low_bound = array_ref_low_bound (exp);
12941 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12942
12943	  /* Optimize the special case of a zero lower bound.
12944
12945	     We convert the low_bound to sizetype to avoid some problems
12946	     with constant folding.  (E.g. suppose the lower bound is 1,
12947	     and its mode is QI.  Without the conversion, (ARRAY
12948	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12949	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
12950 if (! integer_zerop (low_bound))
12951 index = size_diffop (index, fold_convert (sizetype, low_bound));
12952
12953 string = exp1;
12954 }
12955
12956 if (string
12957 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12958 && TREE_CODE (string) == STRING_CST
12959 && TREE_CODE (index) == INTEGER_CST
12960 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12961 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12962 == MODE_INT)
12963 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12964 return fold_convert (TREE_TYPE (exp),
12965 build_int_cst (NULL_TREE,
12966 (TREE_STRING_POINTER (string)
12967 [TREE_INT_CST_LOW (index)])));
12968 }
12969 return NULL;
12970}
12971
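
/* Source-level examples of the folding above: both of

     "hello"[1]  and  *("hello" + 1)

   read element 1 of a constant string and fold to the character
   constant 101, i.e. 'e'.  */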
12972/* Return the tree for neg (ARG0) when ARG0 is known to be either
12973 an integer constant or real constant.
12974
12975 TYPE is the type of the result. */
12976
12977static tree
12978fold_negate_const (tree arg0, tree type)
12979{
12980 tree t = NULL_TREE;
12981
12982 switch (TREE_CODE (arg0))
12983 {
12984 case INTEGER_CST:
12985 {
12986 unsigned HOST_WIDE_INT low;
12987 HOST_WIDE_INT high;
12988 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12989 TREE_INT_CST_HIGH (arg0),
12990 &low, &high);
12991 t = build_int_cst_wide (type, low, high);
12992 t = force_fit_type (t, 1,
12993 (overflow | TREE_OVERFLOW (arg0))
12994 && !TYPE_UNSIGNED (type),
12995 TREE_CONSTANT_OVERFLOW (arg0));
12996 break;
12997 }
12998
12999 case REAL_CST:
13000 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13001 break;
13002
13003 default:
13004 gcc_unreachable ();
13005 }
13006
13007 return t;
13008}
13009
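
/* A stand-alone sketch of the double-word negation performed by
   neg_double above, on an explicit low/high word pair (illustrative,
   not the GCC routine itself).  */

#if 0
static int
neg_double_sketch (unsigned long long low, long long high,
		   unsigned long long *plow, long long *phigh)
{
  /* Two's complement: complement both words and add one; the carry
     leaves the low word only when LOW is zero.  The high word is
     handled in unsigned arithmetic to sidestep signed overflow.  */
  *plow = -low;
  *phigh = (long long) (~(unsigned long long) high + (low == 0));
  /* Overflow iff the operand is its own nonzero negation, i.e. the
     most negative representable value.  */
  return *plow == low && *phigh == high && (low != 0 || high != 0);
}
#endif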
13010/* Return the tree for abs (ARG0) when ARG0 is known to be either
13011 an integer constant or real constant.
13012
13013 TYPE is the type of the result. */
13014
13015tree
13016fold_abs_const (tree arg0, tree type)
13017{
13018 tree t = NULL_TREE;
13019
13020 switch (TREE_CODE (arg0))
13021 {
13022 case INTEGER_CST:
13023 /* If the value is unsigned, then the absolute value is
13024 the same as the ordinary value. */
13025 if (TYPE_UNSIGNED (type))
13026 t = arg0;
13027 /* Similarly, if the value is non-negative. */
13028 else if (INT_CST_LT (integer_minus_one_node, arg0))
13029 t = arg0;
13030 /* If the value is negative, then the absolute value is
13031 its negation. */
13032 else
13033 {
13034 unsigned HOST_WIDE_INT low;
13035 HOST_WIDE_INT high;
13036 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13037 TREE_INT_CST_HIGH (arg0),
13038 &low, &high);
13039 t = build_int_cst_wide (type, low, high);
13040 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13041 TREE_CONSTANT_OVERFLOW (arg0));
13042 }
13043 break;
13044
13045 case REAL_CST:
13046 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13047 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13048 else
13049 t = arg0;
13050 break;
13051
13052 default:
13053 gcc_unreachable ();
13054 }
13055
13056 return t;
13057}
13058
13059/* Return the tree for not (ARG0) when ARG0 is known to be an integer
13060 constant. TYPE is the type of the result. */
13061
13062static tree
13063fold_not_const (tree arg0, tree type)
13064{
13065 tree t = NULL_TREE;
13066
13067 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13068
13069 t = build_int_cst_wide (type,
13070 ~ TREE_INT_CST_LOW (arg0),
13071 ~ TREE_INT_CST_HIGH (arg0));
13072 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13073 TREE_CONSTANT_OVERFLOW (arg0));
13074
13075 return t;
13076}
13077
13078/* Given CODE, a relational operator, the target type, TYPE and two
13079 constant operands OP0 and OP1, return the result of the
13080 relational operation. If the result is not a compile time
13081 constant, then return NULL_TREE. */
13082
13083static tree
13084fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13085{
13086 int result, invert;
13087
13088 /* From here on, the only cases we handle are when the result is
13089 known to be a constant. */
13090
13091 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13092 {
13093 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13094 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13095
13096 /* Handle the cases where either operand is a NaN. */
13097 if (real_isnan (c0) || real_isnan (c1))
13098 {
13099 switch (code)
13100 {
13101 case EQ_EXPR:
13102 case ORDERED_EXPR:
13103 result = 0;
13104 break;
13105
13106 case NE_EXPR:
13107 case UNORDERED_EXPR:
13108 case UNLT_EXPR:
13109 case UNLE_EXPR:
13110 case UNGT_EXPR:
13111 case UNGE_EXPR:
13112 case UNEQ_EXPR:
13113 result = 1;
13114 break;
13115
13116 case LT_EXPR:
13117 case LE_EXPR:
13118 case GT_EXPR:
13119 case GE_EXPR:
13120 case LTGT_EXPR:
13121 if (flag_trapping_math)
13122 return NULL_TREE;
13123 result = 0;
13124 break;
13125
13126 default:
13127 gcc_unreachable ();
13128 }
13129
13130 return constant_boolean_node (result, type);
13131 }
13132
13133 return constant_boolean_node (real_compare (code, c0, c1), type);
13134 }
13135
13136 /* Handle equality/inequality of complex constants. */
13137 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13138 {
13139 tree rcond = fold_relational_const (code, type,
13140 TREE_REALPART (op0),
13141 TREE_REALPART (op1));
13142 tree icond = fold_relational_const (code, type,
13143 TREE_IMAGPART (op0),
13144 TREE_IMAGPART (op1));
13145 if (code == EQ_EXPR)
13146 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13147 else if (code == NE_EXPR)
13148 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13149 else
13150 return NULL_TREE;
13151 }
13152
13153 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13154
13155 To compute GT, swap the arguments and do LT.
13156 To compute GE, do LT and invert the result.
13157 To compute LE, swap the arguments, do LT and invert the result.
13158 To compute NE, do EQ and invert the result.
13159
13160 Therefore, the code below must handle only EQ and LT. */
13161
13162 if (code == LE_EXPR || code == GT_EXPR)
13163 {
13164 tree tem = op0;
13165 op0 = op1;
13166 op1 = tem;
13167 code = swap_tree_comparison (code);
13168 }
13169
13170  /* Note that it is safe to invert for real values here because we
13171     have already handled the one case where it matters.  */
13172
13173 invert = 0;
13174 if (code == NE_EXPR || code == GE_EXPR)
13175 {
13176 invert = 1;
13177 code = invert_tree_comparison (code, false);
13178 }
13179
13180  /* Compute a result for LT or EQ if the arguments permit;
13181     otherwise return NULL_TREE.  */
13182 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13183 {
13184 if (code == EQ_EXPR)
13185 result = tree_int_cst_equal (op0, op1);
13186 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13187 result = INT_CST_LT_UNSIGNED (op0, op1);
13188 else
13189 result = INT_CST_LT (op0, op1);
13190 }
13191 else
13192 return NULL_TREE;
13193
13194 if (invert)
13195 result ^= 1;
13196 return constant_boolean_node (result, type);
13197}
13198
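
/* A miniature model of the swap/invert reduction above, on plain
   longs (illustrative only, not GCC API): all six orderings are
   derived from LT and EQ.  */

#if 0
enum toy_cmp { TOY_LT, TOY_LE, TOY_GT, TOY_GE, TOY_EQ, TOY_NE };

static int
toy_compare (enum toy_cmp code, long a, long b)
{
  int invert = 0, result;

  if (code == TOY_LE || code == TOY_GT)	 /* swap: LE -> GE, GT -> LT */
    {
      long tmp = a;
      a = b, b = tmp;
      code = (code == TOY_LE ? TOY_GE : TOY_LT);
    }
  if (code == TOY_NE || code == TOY_GE)	 /* invert: NE -> EQ, GE -> LT */
    {
      invert = 1;
      code = (code == TOY_NE ? TOY_EQ : TOY_LT);
    }
  result = (code == TOY_EQ ? a == b : a < b);
  return result ^ invert;
}
#endif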
13199/* Build an expression for a cleanup point containing EXPR, with type TYPE.
13200   Don't build a cleanup point expression for an EXPR which doesn't have
13201   side effects.  */
13202
13203tree
13204fold_build_cleanup_point_expr (tree type, tree expr)
13205{
13206 /* If the expression does not have side effects then we don't have to wrap
13207 it with a cleanup point expression. */
13208 if (!TREE_SIDE_EFFECTS (expr))
13209 return expr;
13210
13211  /* If the expression is a return, check whether the expression inside the
13212     return, or the right-hand side of the MODIFY_EXPR inside the return,
13213     has no side effects.  If either doesn't, we don't need to wrap the
13214     expression in a cleanup point expression.  Note we don't check the
13215     left-hand side of the modify because it should always be a return decl.  */
13216 if (TREE_CODE (expr) == RETURN_EXPR)
13217 {
13218 tree op = TREE_OPERAND (expr, 0);
13219 if (!op || !TREE_SIDE_EFFECTS (op))
13220 return expr;
13221 op = TREE_OPERAND (op, 1);
13222 if (!TREE_SIDE_EFFECTS (op))
13223 return expr;
13224 }
13225
13226 return build1 (CLEANUP_POINT_EXPR, type, expr);
13227}
13228
13229/* Build an expression for the address of T. Folds away INDIRECT_REF to
13230 avoid confusing the gimplify process. */
13231
13232tree
13233build_fold_addr_expr_with_type (tree t, tree ptrtype)
13234{
13235 /* The size of the object is not relevant when talking about its address. */
13236 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13237 t = TREE_OPERAND (t, 0);
13238
13239  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
13240 if (TREE_CODE (t) == INDIRECT_REF
13241 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13242 {
13243 t = TREE_OPERAND (t, 0);
13244 if (TREE_TYPE (t) != ptrtype)
13245 t = build1 (NOP_EXPR, ptrtype, t);
13246 }
13247 else
13248 {
13249 tree base = t;
13250
13251 while (handled_component_p (base))
13252 base = TREE_OPERAND (base, 0);
13253 if (DECL_P (base))
13254 TREE_ADDRESSABLE (base) = 1;
13255
13256 t = build1 (ADDR_EXPR, ptrtype, t);
13257 }
13258
13259 return t;
13260}
13261
13262tree
13263build_fold_addr_expr (tree t)
13264{
13265 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13266}
13267
13268/* Given a pointer value OP0 and a type TYPE, return a simplified version
13269 of an indirection through OP0, or NULL_TREE if no simplification is
13270 possible. */
13271
13272tree
13273fold_indirect_ref_1 (tree type, tree op0)
13274{
13275 tree sub = op0;
13276 tree subtype;
13277
13278 STRIP_NOPS (sub);
13279 subtype = TREE_TYPE (sub);
13280 if (!POINTER_TYPE_P (subtype))
13281 return NULL_TREE;
13282
13283 if (TREE_CODE (sub) == ADDR_EXPR)
13284 {
13285 tree op = TREE_OPERAND (sub, 0);
13286 tree optype = TREE_TYPE (op);
13287      /* *&CONST_DECL -> the value of the const decl.  */
13288 if (TREE_CODE (op) == CONST_DECL)
13289 return DECL_INITIAL (op);
13290 /* *&p => p; make sure to handle *&"str"[cst] here. */
13291 if (type == optype)
13292 {
13293 tree fop = fold_read_from_constant_string (op);
13294 if (fop)
13295 return fop;
13296 else
13297 return op;
13298 }
13299 /* *(foo *)&fooarray => fooarray[0] */
13300 else if (TREE_CODE (optype) == ARRAY_TYPE
13301 && type == TREE_TYPE (optype))
13302 {
13303 tree type_domain = TYPE_DOMAIN (optype);
13304 tree min_val = size_zero_node;
13305 if (type_domain && TYPE_MIN_VALUE (type_domain))
13306 min_val = TYPE_MIN_VALUE (type_domain);
13307 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13308 }
13309 /* *(foo *)&complexfoo => __real__ complexfoo */
13310 else if (TREE_CODE (optype) == COMPLEX_TYPE
13311 && type == TREE_TYPE (optype))
13312 return fold_build1 (REALPART_EXPR, type, op);
13313 }
13314
13315 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13316 if (TREE_CODE (sub) == PLUS_EXPR
13317 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13318 {
13319 tree op00 = TREE_OPERAND (sub, 0);
13320 tree op01 = TREE_OPERAND (sub, 1);
13321 tree op00type;
13322
13323 STRIP_NOPS (op00);
13324 op00type = TREE_TYPE (op00);
13325 if (TREE_CODE (op00) == ADDR_EXPR
13326 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13327 && type == TREE_TYPE (TREE_TYPE (op00type)))
13328 {
13329 tree size = TYPE_SIZE_UNIT (type);
13330 if (tree_int_cst_equal (size, op01))
13331 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13332 }
13333 }
13334
13335 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13336 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13337 && type == TREE_TYPE (TREE_TYPE (subtype)))
13338 {
13339 tree type_domain;
13340 tree min_val = size_zero_node;
13341 sub = build_fold_indirect_ref (sub);
13342 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13343 if (type_domain && TYPE_MIN_VALUE (type_domain))
13344 min_val = TYPE_MIN_VALUE (type_domain);
13345 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13346 }
13347
13348 return NULL_TREE;
13349}
13350
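
/* Source-level instances of the simplifications above (sketch):

     _Complex double z;  double a[4];  double (*ap)[4];

     *(double *) &z	  -> __real__ z
     ((double *) &z)[1]	  -> __imag__ z
     *(double *) &a	  -> a[0]
     *(double *) ap	  -> (*ap)[0]  */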
13351/* Builds an expression for an indirection through T, simplifying some
13352 cases. */
13353
13354tree
13355build_fold_indirect_ref (tree t)
13356{
13357 tree type = TREE_TYPE (TREE_TYPE (t));
13358 tree sub = fold_indirect_ref_1 (type, t);
13359
13360 if (sub)
13361 return sub;
13362 else
13363 return build1 (INDIRECT_REF, type, t);
13364}
13365
13366/* Given an INDIRECT_REF T, return either T or a simplified version. */
13367
13368tree
13369fold_indirect_ref (tree t)
13370{
13371 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13372
13373 if (sub)
13374 return sub;
13375 else
13376 return t;
13377}
13378
13379/* Strip non-trapping, non-side-effecting tree nodes from an expression
13380 whose result is ignored. The type of the returned tree need not be
13381   the same as that of the original expression.  */
13382
13383tree
13384fold_ignored_result (tree t)
13385{
13386 if (!TREE_SIDE_EFFECTS (t))
13387 return integer_zero_node;
13388
13389 for (;;)
13390 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13391 {
13392 case tcc_unary:
13393 t = TREE_OPERAND (t, 0);
13394 break;
13395
13396 case tcc_binary:
13397 case tcc_comparison:
13398 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13399 t = TREE_OPERAND (t, 0);
13400 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13401 t = TREE_OPERAND (t, 1);
13402 else
13403 return t;
13404 break;
13405
13406 case tcc_expression:
13407 switch (TREE_CODE (t))
13408 {
13409 case COMPOUND_EXPR:
13410 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13411 return t;
13412 t = TREE_OPERAND (t, 0);
13413 break;
13414
13415 case COND_EXPR:
13416 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13417 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13418 return t;
13419 t = TREE_OPERAND (t, 0);
13420 break;
13421
13422 default:
13423 return t;
13424 }
13425 break;
13426
13427 default:
13428 return t;
13429 }
13430}
13431
13432/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13433 This can only be applied to objects of a sizetype. */
13434
13435tree
13436round_up (tree value, int divisor)
13437{
13438 tree div = NULL_TREE;
13439
13440 gcc_assert (divisor > 0);
13441 if (divisor == 1)
13442 return value;
13443
13444  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13445     have to do anything.  Only do this when we are not given a const,
13446     because in that case this check is more expensive than simply
13447     performing the rounding.  */
13448 if (TREE_CODE (value) != INTEGER_CST)
13449 {
13450 div = build_int_cst (TREE_TYPE (value), divisor);
13451
13452 if (multiple_of_p (TREE_TYPE (value), value, div))
13453 return value;
13454 }
13455
13456 /* If divisor is a power of two, simplify this to bit manipulation. */
13457 if (divisor == (divisor & -divisor))
13458 {
13459 tree t;
13460
13461 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13462 value = size_binop (PLUS_EXPR, value, t);
13463 t = build_int_cst (TREE_TYPE (value), -divisor);
13464 value = size_binop (BIT_AND_EXPR, value, t);
13465 }
13466 else
13467 {
13468 if (!div)
13469 div = build_int_cst (TREE_TYPE (value), divisor);
13470 value = size_binop (CEIL_DIV_EXPR, value, div);
13471 value = size_binop (MULT_EXPR, value, div);
13472 }
13473
13474 return value;
13475}
13476
13477/* Likewise, but round down. */
13478
13479tree
13480round_down (tree value, int divisor)
13481{
13482 tree div = NULL_TREE;
13483
13484 gcc_assert (divisor > 0);
13485 if (divisor == 1)
13486 return value;
13487
13488  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13489     have to do anything.  Only do this when we are not given a const,
13490     because in that case this check is more expensive than simply
13491     performing the rounding.  */
13492 if (TREE_CODE (value) != INTEGER_CST)
13493 {
13494 div = build_int_cst (TREE_TYPE (value), divisor);
13495
13496 if (multiple_of_p (TREE_TYPE (value), value, div))
13497 return value;
13498 }
13499
13500 /* If divisor is a power of two, simplify this to bit manipulation. */
13501 if (divisor == (divisor & -divisor))
13502 {
13503 tree t;
13504
13505 t = build_int_cst (TREE_TYPE (value), -divisor);
13506 value = size_binop (BIT_AND_EXPR, value, t);
13507 }
13508 else
13509 {
13510 if (!div)
13511 div = build_int_cst (TREE_TYPE (value), divisor);
13512 value = size_binop (FLOOR_DIV_EXPR, value, div);
13513 value = size_binop (MULT_EXPR, value, div);
13514 }
13515
13516 return value;
13517}
13518
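
/* The power-of-two fast paths above, as plain unsigned arithmetic
   (illustrative helpers, not GCC API): e.g. rounding 13 to a multiple
   of 8 gives 16 upward and 8 downward.  */

#if 0
static unsigned long
round_up_pow2_sketch (unsigned long value, unsigned long divisor)
{
  /* DIVISOR must be a power of two: divisor == (divisor & -divisor).  */
  return (value + divisor - 1) & -divisor;
}

static unsigned long
round_down_pow2_sketch (unsigned long value, unsigned long divisor)
{
  return value & -divisor;
}
#endif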
13519/* Returns the pointer to the base of the object addressed by EXP and
13520 extracts the information about the offset of the access, storing it
13521   in PBITPOS and POFFSET.  */
13522
13523static tree
13524split_address_to_core_and_offset (tree exp,
13525 HOST_WIDE_INT *pbitpos, tree *poffset)
13526{
13527 tree core;
13528 enum machine_mode mode;
13529 int unsignedp, volatilep;
13530 HOST_WIDE_INT bitsize;
13531
13532 if (TREE_CODE (exp) == ADDR_EXPR)
13533 {
13534 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13535 poffset, &mode, &unsignedp, &volatilep,
13536 false);
13537 core = build_fold_addr_expr (core);
13538 }
13539 else
13540 {
13541 core = exp;
13542 *pbitpos = 0;
13543 *poffset = NULL_TREE;
13544 }
13545
13546 return core;
13547}
13548
13549/* Returns true if addresses of E1 and E2 differ by a constant, false
13550 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13551
13552bool
13553ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13554{
13555 tree core1, core2;
13556 HOST_WIDE_INT bitpos1, bitpos2;
13557 tree toffset1, toffset2, tdiff, type;
13558
13559 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13560 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13561
13562 if (bitpos1 % BITS_PER_UNIT != 0
13563 || bitpos2 % BITS_PER_UNIT != 0
13564 || !operand_equal_p (core1, core2, 0))
13565 return false;
13566
13567 if (toffset1 && toffset2)
13568 {
13569 type = TREE_TYPE (toffset1);
13570 if (type != TREE_TYPE (toffset2))
13571 toffset2 = fold_convert (type, toffset2);
13572
13573 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13574 if (!cst_and_fits_in_hwi (tdiff))
13575 return false;
13576
13577 *diff = int_cst_value (tdiff);
13578 }
13579 else if (toffset1 || toffset2)
13580 {
13581 /* If only one of the offsets is non-constant, the difference cannot
13582 be a constant. */
13583 return false;
13584 }
13585 else
13586 *diff = 0;
13587
13588 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13589 return true;
13590}
13591
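
/* Worked example of the above (assuming a 4-byte int): for int a[10],
   the addresses &a[7] and &a[2] split to the common core &a with
   constant byte offsets 28 and 8, so ptr_difference_const stores 20
   in *DIFF and returns true.  &a[i] versus &a[2] has a non-constant
   offset on one side only, so it returns false.  */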
13592/* Simplify the floating point expression EXP when the sign of the
13593 result is not significant. Return NULL_TREE if no simplification
13594 is possible. */
13595
13596tree
13597fold_strip_sign_ops (tree exp)
13598{
13599 tree arg0, arg1;
13600
13601 switch (TREE_CODE (exp))
13602 {
13603 case ABS_EXPR:
13604 case NEGATE_EXPR:
13605 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13606 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13607
13608 case MULT_EXPR:
13609 case RDIV_EXPR:
13610 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13611 return NULL_TREE;
13612 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13613 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13614 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13615 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13616 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13617 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13618 break;
13619
13620 default:
13621 break;
13622 }
13623 return NULL_TREE;
13624}
13625