/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
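   The encoding is: bit 0 is "less than", bit 1 is "equal", bit 2 is
   "greater than" and bit 3 is "unordered", so, for example, COMPCODE_LE
   (3) is COMPCODE_LT | COMPCODE_EQ, and OR-ing two codes corresponds to
   OR-ing the comparisons they represent.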
   */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/*
   We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.
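     For example, with prec == 8 only the low 8 bits of LOW are kept;
     the masks below handle the three cases of a precision of two or
     more host words, between one and two words, and less than one word.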
     */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
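   Internally each operand is split into four halfwords (see LOWPART
   and HIGHPART above), multiplied with the schoolbook O(n^2) loop, and
   the top four halfwords of the product are used only for the overflow
   check.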
   */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.
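   For example, dividing -7 by 2 yields -3 under TRUNC_DIV_EXPR, -4 under
   FLOOR_DIV_EXPR and -3 under CEIL_DIV_EXPR, while 7 / 2 is 4 under
   CEIL_DIV_EXPR; ROUND_DIV_EXPR rounds to the nearest integer.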
   */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.
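             The refinement step just below lowers the guess by at most
             one, and the add-back step after the multiply-and-subtract
             corrects the remaining case where it was still one too high.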
             */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.
           */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.
   This function will use the smaller of CODE and the deferred code
   when deciding whether to issue the warning.  CODE may be zero to
   mean to always use the deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, "%s", gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.
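   For a signed type of precision P the only problematic value is the
   most negative one, -2**(P-1), whose negation does not fit; unsigned
   types are rejected outright.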
   */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).
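         This is only safe for the odd math functions listed in
         negate_mathfn_p above, e.g. sin, tan and cbrt.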
         */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (!
          HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.
   */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.
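         For example, when IN is A - CST and CODE is PLUS_EXPR, the literal
         CST was recorded above with neg_litp_p set, so it is moved to
         *MINUS_LITP here rather than being negated.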
         */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.
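         Note that RSHIFT_EXPR falls through to here with INT2L negated,
         so lshift_double handles both shift directions.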
         */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.
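         When NOTRUNC is zero, the force_fit_type call in the else branch
         below both truncates the value and sets these flags for us.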
         */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.
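         (REAL_MODE_FORMAT_COMPOSITE_P below identifies composite formats
         such as IBM extended double, for which that emulation is inexact.)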
         */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.
*/ 1902 return int_const_binop (code, arg0, arg1, 0); 1903 } 1904 1905 return fold_build2 (code, type, arg0, arg1); 1906} 1907 1908/* Given two values, either both of sizetype or both of bitsizetype, 1909 compute the difference between the two values. Return the value 1910 in signed type corresponding to the type of the operands. */ 1911 1912tree 1913size_diffop (tree arg0, tree arg1) 1914{ 1915 tree type = TREE_TYPE (arg0); 1916 tree ctype; 1917 1918 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) 1919 && type == TREE_TYPE (arg1)); 1920 1921 /* If the type is already signed, just do the simple thing. */ 1922 if (!TYPE_UNSIGNED (type)) 1923 return size_binop (MINUS_EXPR, arg0, arg1); 1924 1925 ctype = type == bitsizetype ? sbitsizetype : ssizetype; 1926 1927 /* If either operand is not a constant, do the conversions to the signed 1928 type and subtract. The hardware will do the right thing with any 1929 overflow in the subtraction. */ 1930 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST) 1931 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0), 1932 fold_convert (ctype, arg1)); 1933 1934 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE. 1935 Otherwise, subtract the other way, convert to CTYPE (we know that can't 1936 overflow) and negate (which can't either). Special-case a result 1937 of zero while we're here. */ 1938 if (tree_int_cst_equal (arg0, arg1)) 1939 return build_int_cst (ctype, 0); 1940 else if (tree_int_cst_lt (arg1, arg0)) 1941 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1)); 1942 else 1943 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0), 1944 fold_convert (ctype, size_binop (MINUS_EXPR, 1945 arg1, arg0))); 1946} 1947 1948/* A subroutine of fold_convert_const handling conversions of an 1949 INTEGER_CST to another integer type. */ 1950 1951static tree 1952fold_convert_const_int_from_int (tree type, tree arg1) 1953{ 1954 tree t; 1955 1956 /* Given an integer constant, make new constant with new type, 1957 appropriately sign-extended or truncated. */ 1958 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1), 1959 TREE_INT_CST_HIGH (arg1)); 1960 1961 t = force_fit_type (t, 1962 /* Don't set the overflow when 1963 converting a pointer */ 1964 !POINTER_TYPE_P (TREE_TYPE (arg1)), 1965 (TREE_INT_CST_HIGH (arg1) < 0 1966 && (TYPE_UNSIGNED (type) 1967 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 1968 | TREE_OVERFLOW (arg1), 1969 TREE_CONSTANT_OVERFLOW (arg1)); 1970 1971 return t; 1972} 1973 1974/* A subroutine of fold_convert_const handling conversions a REAL_CST 1975 to an integer type. */ 1976 1977static tree 1978fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1) 1979{ 1980 int overflow = 0; 1981 tree t; 1982 1983 /* The following code implements the floating point to integer 1984 conversion rules required by the Java Language Specification, 1985 that IEEE NaNs are mapped to zero and values that overflow 1986 the target precision saturate, i.e. values greater than 1987 INT_MAX are mapped to INT_MAX, and values less than INT_MIN 1988 are mapped to INT_MIN. These semantics are allowed by the 1989 C and C++ standards that simply state that the behavior of 1990 FP-to-integer conversion is unspecified upon overflow. 
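   Concretely, converting the REAL_CST 1.0e30 to a 32-bit signed type
   therefore yields INT_MAX (2147483647), converting -1.0e30 yields INT_MIN,
   and converting a NaN yields 0; in each of these cases the overflow flag is
   set on the resulting INTEGER_CST.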
*/ 1991 1992 HOST_WIDE_INT high, low; 1993 REAL_VALUE_TYPE r; 1994 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); 1995 1996 switch (code) 1997 { 1998 case FIX_TRUNC_EXPR: 1999 real_trunc (&r, VOIDmode, &x); 2000 break; 2001 2002 case FIX_CEIL_EXPR: 2003 real_ceil (&r, VOIDmode, &x); 2004 break; 2005 2006 case FIX_FLOOR_EXPR: 2007 real_floor (&r, VOIDmode, &x); 2008 break; 2009 2010 case FIX_ROUND_EXPR: 2011 real_round (&r, VOIDmode, &x); 2012 break; 2013 2014 default: 2015 gcc_unreachable (); 2016 } 2017 2018 /* If R is NaN, return zero and show we have an overflow. */ 2019 if (REAL_VALUE_ISNAN (r)) 2020 { 2021 overflow = 1; 2022 high = 0; 2023 low = 0; 2024 } 2025 2026 /* See if R is less than the lower bound or greater than the 2027 upper bound. */ 2028 2029 if (! overflow) 2030 { 2031 tree lt = TYPE_MIN_VALUE (type); 2032 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); 2033 if (REAL_VALUES_LESS (r, l)) 2034 { 2035 overflow = 1; 2036 high = TREE_INT_CST_HIGH (lt); 2037 low = TREE_INT_CST_LOW (lt); 2038 } 2039 } 2040 2041 if (! overflow) 2042 { 2043 tree ut = TYPE_MAX_VALUE (type); 2044 if (ut) 2045 { 2046 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); 2047 if (REAL_VALUES_LESS (u, r)) 2048 { 2049 overflow = 1; 2050 high = TREE_INT_CST_HIGH (ut); 2051 low = TREE_INT_CST_LOW (ut); 2052 } 2053 } 2054 } 2055 2056 if (! overflow) 2057 REAL_VALUE_TO_INT (&low, &high, r); 2058 2059 t = build_int_cst_wide (type, low, high); 2060 2061 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1), 2062 TREE_CONSTANT_OVERFLOW (arg1)); 2063 return t; 2064} 2065 2066/* A subroutine of fold_convert_const handling conversions a REAL_CST 2067 to another floating point type. */ 2068 2069static tree 2070fold_convert_const_real_from_real (tree type, tree arg1) 2071{ 2072 REAL_VALUE_TYPE value; 2073 tree t; 2074 2075 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); 2076 t = build_real (type, value); 2077 2078 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 2079 TREE_CONSTANT_OVERFLOW (t) 2080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1); 2081 return t; 2082} 2083 2084/* Attempt to fold type conversion operation CODE of expression ARG1 to 2085 type TYPE. If no simplification can be done return NULL_TREE. */ 2086 2087static tree 2088fold_convert_const (enum tree_code code, tree type, tree arg1) 2089{ 2090 if (TREE_TYPE (arg1) == type) 2091 return arg1; 2092 2093 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)) 2094 { 2095 if (TREE_CODE (arg1) == INTEGER_CST) 2096 return fold_convert_const_int_from_int (type, arg1); 2097 else if (TREE_CODE (arg1) == REAL_CST) 2098 return fold_convert_const_int_from_real (code, type, arg1); 2099 } 2100 else if (TREE_CODE (type) == REAL_TYPE) 2101 { 2102 if (TREE_CODE (arg1) == INTEGER_CST) 2103 return build_real_from_int_cst (type, arg1); 2104 if (TREE_CODE (arg1) == REAL_CST) 2105 return fold_convert_const_real_from_real (type, arg1); 2106 } 2107 return NULL_TREE; 2108} 2109 2110/* Construct a vector of zero elements of vector type TYPE. */ 2111 2112static tree 2113build_zero_vector (tree type) 2114{ 2115 tree elem, list; 2116 int i, units; 2117 2118 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 2119 units = TYPE_VECTOR_SUBPARTS (type); 2120 2121 list = NULL_TREE; 2122 for (i = 0; i < units; i++) 2123 list = tree_cons (NULL_TREE, elem, list); 2124 return build_vector (type, list); 2125} 2126 2127/* Convert expression ARG to type TYPE. 
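   For constant operands this is handled by fold_convert_const; converting the
   INTEGER_CST 3 to a double type, for instance, yields the REAL_CST 3.0 with
   no conversion node left behind.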
Used by the middle-end for 2128 simple conversions in preference to calling the front-end's convert. */ 2129 2130tree 2131fold_convert (tree type, tree arg) 2132{ 2133 tree orig = TREE_TYPE (arg); 2134 tree tem; 2135 2136 if (type == orig) 2137 return arg; 2138 2139 if (TREE_CODE (arg) == ERROR_MARK 2140 || TREE_CODE (type) == ERROR_MARK 2141 || TREE_CODE (orig) == ERROR_MARK) 2142 return error_mark_node; 2143 2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig) 2145 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type), 2146 TYPE_MAIN_VARIANT (orig))) 2147 return fold_build1 (NOP_EXPR, type, arg); 2148 2149 switch (TREE_CODE (type)) 2150 { 2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2152 case POINTER_TYPE: case REFERENCE_TYPE: 2153 case OFFSET_TYPE: 2154 if (TREE_CODE (arg) == INTEGER_CST) 2155 { 2156 tem = fold_convert_const (NOP_EXPR, type, arg); 2157 if (tem != NULL_TREE) 2158 return tem; 2159 } 2160 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2161 || TREE_CODE (orig) == OFFSET_TYPE) 2162 return fold_build1 (NOP_EXPR, type, arg); 2163 if (TREE_CODE (orig) == COMPLEX_TYPE) 2164 { 2165 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); 2166 return fold_convert (type, tem); 2167 } 2168 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE 2169 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2170 return fold_build1 (NOP_EXPR, type, arg); 2171 2172 case REAL_TYPE: 2173 if (TREE_CODE (arg) == INTEGER_CST) 2174 { 2175 tem = fold_convert_const (FLOAT_EXPR, type, arg); 2176 if (tem != NULL_TREE) 2177 return tem; 2178 } 2179 else if (TREE_CODE (arg) == REAL_CST) 2180 { 2181 tem = fold_convert_const (NOP_EXPR, type, arg); 2182 if (tem != NULL_TREE) 2183 return tem; 2184 } 2185 2186 switch (TREE_CODE (orig)) 2187 { 2188 case INTEGER_TYPE: 2189 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2190 case POINTER_TYPE: case REFERENCE_TYPE: 2191 return fold_build1 (FLOAT_EXPR, type, arg); 2192 2193 case REAL_TYPE: 2194 return fold_build1 (NOP_EXPR, type, arg); 2195 2196 case COMPLEX_TYPE: 2197 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); 2198 return fold_convert (type, tem); 2199 2200 default: 2201 gcc_unreachable (); 2202 } 2203 2204 case COMPLEX_TYPE: 2205 switch (TREE_CODE (orig)) 2206 { 2207 case INTEGER_TYPE: 2208 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 2209 case POINTER_TYPE: case REFERENCE_TYPE: 2210 case REAL_TYPE: 2211 return build2 (COMPLEX_EXPR, type, 2212 fold_convert (TREE_TYPE (type), arg), 2213 fold_convert (TREE_TYPE (type), integer_zero_node)); 2214 case COMPLEX_TYPE: 2215 { 2216 tree rpart, ipart; 2217 2218 if (TREE_CODE (arg) == COMPLEX_EXPR) 2219 { 2220 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0)); 2221 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1)); 2222 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart); 2223 } 2224 2225 arg = save_expr (arg); 2226 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); 2227 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg); 2228 rpart = fold_convert (TREE_TYPE (type), rpart); 2229 ipart = fold_convert (TREE_TYPE (type), ipart); 2230 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart); 2231 } 2232 2233 default: 2234 gcc_unreachable (); 2235 } 2236 2237 case VECTOR_TYPE: 2238 if (integer_zerop (arg)) 2239 return build_zero_vector (type); 2240 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2241 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2242 || TREE_CODE (orig) == VECTOR_TYPE); 2243 return fold_build1 
(VIEW_CONVERT_EXPR, type, arg); 2244 2245 case VOID_TYPE: 2246 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg)); 2247 2248 default: 2249 gcc_unreachable (); 2250 } 2251} 2252 2253/* Return false if expr can be assumed not to be an lvalue, true 2254 otherwise. */ 2255 2256static bool 2257maybe_lvalue_p (tree x) 2258{ 2259 /* We only need to wrap lvalue tree codes. */ 2260 switch (TREE_CODE (x)) 2261 { 2262 case VAR_DECL: 2263 case PARM_DECL: 2264 case RESULT_DECL: 2265 case LABEL_DECL: 2266 case FUNCTION_DECL: 2267 case SSA_NAME: 2268 2269 case COMPONENT_REF: 2270 case INDIRECT_REF: 2271 case ALIGN_INDIRECT_REF: 2272 case MISALIGNED_INDIRECT_REF: 2273 case ARRAY_REF: 2274 case ARRAY_RANGE_REF: 2275 case BIT_FIELD_REF: 2276 case OBJ_TYPE_REF: 2277 2278 case REALPART_EXPR: 2279 case IMAGPART_EXPR: 2280 case PREINCREMENT_EXPR: 2281 case PREDECREMENT_EXPR: 2282 case SAVE_EXPR: 2283 case TRY_CATCH_EXPR: 2284 case WITH_CLEANUP_EXPR: 2285 case COMPOUND_EXPR: 2286 case MODIFY_EXPR: 2287 case TARGET_EXPR: 2288 case COND_EXPR: 2289 case BIND_EXPR: 2290 case MIN_EXPR: 2291 case MAX_EXPR: 2292 break; 2293 2294 default: 2295 /* Assume the worst for front-end tree codes. */ 2296 if ((int)TREE_CODE (x) >= NUM_TREE_CODES) 2297 break; 2298 return false; 2299 } 2300 2301 return true; 2302} 2303 2304/* Return an expr equal to X but certainly not valid as an lvalue. */ 2305 2306tree 2307non_lvalue (tree x) 2308{ 2309 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to 2310 us. */ 2311 if (in_gimple_form) 2312 return x; 2313 2314 if (! maybe_lvalue_p (x)) 2315 return x; 2316 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x); 2317} 2318 2319/* Nonzero means lvalues are limited to those valid in pedantic ANSI C. 2320 Zero means allow extended lvalues. */ 2321 2322int pedantic_lvalues; 2323 2324/* When pedantic, return an expr equal to X but certainly not valid as a 2325 pedantic lvalue. Otherwise, return X. */ 2326 2327static tree 2328pedantic_non_lvalue (tree x) 2329{ 2330 if (pedantic_lvalues) 2331 return non_lvalue (x); 2332 else 2333 return x; 2334} 2335 2336/* Given a tree comparison code, return the code that is the logical inverse 2337 of the given code. It is not safe to do this for floating-point 2338 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode 2339 as well: if reversing the comparison is unsafe, return ERROR_MARK. */ 2340 2341enum tree_code 2342invert_tree_comparison (enum tree_code code, bool honor_nans) 2343{ 2344 if (honor_nans && flag_trapping_math) 2345 return ERROR_MARK; 2346 2347 switch (code) 2348 { 2349 case EQ_EXPR: 2350 return NE_EXPR; 2351 case NE_EXPR: 2352 return EQ_EXPR; 2353 case GT_EXPR: 2354 return honor_nans ? UNLE_EXPR : LE_EXPR; 2355 case GE_EXPR: 2356 return honor_nans ? UNLT_EXPR : LT_EXPR; 2357 case LT_EXPR: 2358 return honor_nans ? UNGE_EXPR : GE_EXPR; 2359 case LE_EXPR: 2360 return honor_nans ? UNGT_EXPR : GT_EXPR; 2361 case LTGT_EXPR: 2362 return UNEQ_EXPR; 2363 case UNEQ_EXPR: 2364 return LTGT_EXPR; 2365 case UNGT_EXPR: 2366 return LE_EXPR; 2367 case UNGE_EXPR: 2368 return LT_EXPR; 2369 case UNLT_EXPR: 2370 return GE_EXPR; 2371 case UNLE_EXPR: 2372 return GT_EXPR; 2373 case ORDERED_EXPR: 2374 return UNORDERED_EXPR; 2375 case UNORDERED_EXPR: 2376 return ORDERED_EXPR; 2377 default: 2378 gcc_unreachable (); 2379 } 2380} 2381 2382/* Similar, but return the comparison that results if the operands are 2383 swapped. This is safe for floating-point. 
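   For example, LT_EXPR becomes GT_EXPR and UNGE_EXPR becomes UNLE_EXPR, while
   the symmetric codes EQ_EXPR, NE_EXPR, ORDERED_EXPR, UNORDERED_EXPR,
   LTGT_EXPR and UNEQ_EXPR are returned unchanged.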
*/ 2384 2385enum tree_code 2386swap_tree_comparison (enum tree_code code) 2387{ 2388 switch (code) 2389 { 2390 case EQ_EXPR: 2391 case NE_EXPR: 2392 case ORDERED_EXPR: 2393 case UNORDERED_EXPR: 2394 case LTGT_EXPR: 2395 case UNEQ_EXPR: 2396 return code; 2397 case GT_EXPR: 2398 return LT_EXPR; 2399 case GE_EXPR: 2400 return LE_EXPR; 2401 case LT_EXPR: 2402 return GT_EXPR; 2403 case LE_EXPR: 2404 return GE_EXPR; 2405 case UNGT_EXPR: 2406 return UNLT_EXPR; 2407 case UNGE_EXPR: 2408 return UNLE_EXPR; 2409 case UNLT_EXPR: 2410 return UNGT_EXPR; 2411 case UNLE_EXPR: 2412 return UNGE_EXPR; 2413 default: 2414 gcc_unreachable (); 2415 } 2416} 2417 2418 2419/* Convert a comparison tree code from an enum tree_code representation 2420 into a compcode bit-based encoding. This function is the inverse of 2421 compcode_to_comparison. */ 2422 2423static enum comparison_code 2424comparison_to_compcode (enum tree_code code) 2425{ 2426 switch (code) 2427 { 2428 case LT_EXPR: 2429 return COMPCODE_LT; 2430 case EQ_EXPR: 2431 return COMPCODE_EQ; 2432 case LE_EXPR: 2433 return COMPCODE_LE; 2434 case GT_EXPR: 2435 return COMPCODE_GT; 2436 case NE_EXPR: 2437 return COMPCODE_NE; 2438 case GE_EXPR: 2439 return COMPCODE_GE; 2440 case ORDERED_EXPR: 2441 return COMPCODE_ORD; 2442 case UNORDERED_EXPR: 2443 return COMPCODE_UNORD; 2444 case UNLT_EXPR: 2445 return COMPCODE_UNLT; 2446 case UNEQ_EXPR: 2447 return COMPCODE_UNEQ; 2448 case UNLE_EXPR: 2449 return COMPCODE_UNLE; 2450 case UNGT_EXPR: 2451 return COMPCODE_UNGT; 2452 case LTGT_EXPR: 2453 return COMPCODE_LTGT; 2454 case UNGE_EXPR: 2455 return COMPCODE_UNGE; 2456 default: 2457 gcc_unreachable (); 2458 } 2459} 2460 2461/* Convert a compcode bit-based encoding of a comparison operator back 2462 to GCC's enum tree_code representation. This function is the 2463 inverse of comparison_to_compcode. */ 2464 2465static enum tree_code 2466compcode_to_comparison (enum comparison_code code) 2467{ 2468 switch (code) 2469 { 2470 case COMPCODE_LT: 2471 return LT_EXPR; 2472 case COMPCODE_EQ: 2473 return EQ_EXPR; 2474 case COMPCODE_LE: 2475 return LE_EXPR; 2476 case COMPCODE_GT: 2477 return GT_EXPR; 2478 case COMPCODE_NE: 2479 return NE_EXPR; 2480 case COMPCODE_GE: 2481 return GE_EXPR; 2482 case COMPCODE_ORD: 2483 return ORDERED_EXPR; 2484 case COMPCODE_UNORD: 2485 return UNORDERED_EXPR; 2486 case COMPCODE_UNLT: 2487 return UNLT_EXPR; 2488 case COMPCODE_UNEQ: 2489 return UNEQ_EXPR; 2490 case COMPCODE_UNLE: 2491 return UNLE_EXPR; 2492 case COMPCODE_UNGT: 2493 return UNGT_EXPR; 2494 case COMPCODE_LTGT: 2495 return LTGT_EXPR; 2496 case COMPCODE_UNGE: 2497 return UNGE_EXPR; 2498 default: 2499 gcc_unreachable (); 2500 } 2501} 2502 2503/* Return a tree for the comparison which is the combination of 2504 doing the AND or OR (depending on CODE) of the two operations LCODE 2505 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account 2506 the possibility of trapping if the mode has NaNs, and return NULL_TREE 2507 if this makes the transformation invalid. 
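   For example, on identical operands (x < y) || (x == y) combines to x <= y,
   since COMPCODE_LT | COMPCODE_EQ (1 | 2) is COMPCODE_LE (3); and
   (x < y) && (x > y), i.e. COMPCODE_LT & COMPCODE_GT (1 & 4), is
   COMPCODE_FALSE, so the whole expression folds to constant false.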
*/ 2508 2509tree 2510combine_comparisons (enum tree_code code, enum tree_code lcode, 2511 enum tree_code rcode, tree truth_type, 2512 tree ll_arg, tree lr_arg) 2513{ 2514 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg))); 2515 enum comparison_code lcompcode = comparison_to_compcode (lcode); 2516 enum comparison_code rcompcode = comparison_to_compcode (rcode); 2517 enum comparison_code compcode; 2518 2519 switch (code) 2520 { 2521 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR: 2522 compcode = lcompcode & rcompcode; 2523 break; 2524 2525 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR: 2526 compcode = lcompcode | rcompcode; 2527 break; 2528 2529 default: 2530 return NULL_TREE; 2531 } 2532 2533 if (!honor_nans) 2534 { 2535 /* Eliminate unordered comparisons, as well as LTGT and ORD 2536 which are not used unless the mode has NaNs. */ 2537 compcode &= ~COMPCODE_UNORD; 2538 if (compcode == COMPCODE_LTGT) 2539 compcode = COMPCODE_NE; 2540 else if (compcode == COMPCODE_ORD) 2541 compcode = COMPCODE_TRUE; 2542 } 2543 else if (flag_trapping_math) 2544 { 2545 /* Check that the original operation and the optimized ones will trap 2546 under the same condition. */ 2547 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0 2548 && (lcompcode != COMPCODE_EQ) 2549 && (lcompcode != COMPCODE_ORD); 2550 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0 2551 && (rcompcode != COMPCODE_EQ) 2552 && (rcompcode != COMPCODE_ORD); 2553 bool trap = (compcode & COMPCODE_UNORD) == 0 2554 && (compcode != COMPCODE_EQ) 2555 && (compcode != COMPCODE_ORD); 2556 2557 /* In a short-circuited boolean expression the LHS might be 2558 such that the RHS, if evaluated, will never trap. For 2559 example, in ORD (x, y) && (x < y), we evaluate the RHS only 2560 if neither x nor y is NaN. (This is a mixed blessing: for 2561 example, the expression above will never trap, hence 2562 optimizing it to x < y would be invalid). */ 2563 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD)) 2564 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD))) 2565 rtrap = false; 2566 2567 /* If the comparison was short-circuited, and only the RHS 2568 trapped, we may now generate a spurious trap. */ 2569 if (rtrap && !ltrap 2570 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 2571 return NULL_TREE; 2572 2573 /* If we changed the conditions that cause a trap, we lose. */ 2574 if ((ltrap || rtrap) != trap) 2575 return NULL_TREE; 2576 } 2577 2578 if (compcode == COMPCODE_TRUE) 2579 return constant_boolean_node (true, truth_type); 2580 else if (compcode == COMPCODE_FALSE) 2581 return constant_boolean_node (false, truth_type); 2582 else 2583 return fold_build2 (compcode_to_comparison (compcode), 2584 truth_type, ll_arg, lr_arg); 2585} 2586 2587/* Return nonzero if CODE is a tree code that represents a truth value. */ 2588 2589static int 2590truth_value_p (enum tree_code code) 2591{ 2592 return (TREE_CODE_CLASS (code) == tcc_comparison 2593 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR 2594 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR 2595 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR); 2596} 2597 2598/* Return nonzero if two operands (typically of the same tree node) 2599 are necessarily equal. If either argument has side-effects this 2600 function returns zero. FLAGS modifies behavior as follows: 2601 2602 If OEP_ONLY_CONST is set, only return nonzero for constants. 2603 This function tests whether the operands are indistinguishable; 2604 it does not test whether they are equal using C's == operation. 
2605 The distinction is important for IEEE floating point, because 2606 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and 2607 (2) two NaNs may be indistinguishable, but NaN!=NaN. 2608 2609 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself 2610 even though it may hold multiple values during a function. 2611 This is because a GCC tree node guarantees that nothing else is 2612 executed between the evaluation of its "operands" (which may often 2613 be evaluated in arbitrary order). Hence if the operands themselves 2614 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the 2615 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST 2616 unset means assuming isochronic (or instantaneous) tree equivalence. 2617 Unless comparing arbitrary expression trees, such as from different 2618 statements, this flag can usually be left unset. 2619 2620 If OEP_PURE_SAME is set, then pure functions with identical arguments 2621 are considered the same. It is used when the caller has other ways 2622 to ensure that global memory is unchanged in between. */ 2623 2624int 2625operand_equal_p (tree arg0, tree arg1, unsigned int flags) 2626{ 2627 /* If either is ERROR_MARK, they aren't equal. */ 2628 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK) 2629 return 0; 2630 2631 /* If both types don't have the same signedness, then we can't consider 2632 them equal. We must check this before the STRIP_NOPS calls 2633 because they may change the signedness of the arguments. */ 2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))) 2635 return 0; 2636 2637 /* If both types don't have the same precision, then it is not safe 2638 to strip NOPs. */ 2639 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1))) 2640 return 0; 2641 2642 STRIP_NOPS (arg0); 2643 STRIP_NOPS (arg1); 2644 2645 /* In case both args are comparisons but with different comparison 2646 code, try to swap the comparison operands of one arg to produce 2647 a match and compare that variant. */ 2648 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2649 && COMPARISON_CLASS_P (arg0) 2650 && COMPARISON_CLASS_P (arg1)) 2651 { 2652 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1)); 2653 2654 if (TREE_CODE (arg0) == swap_code) 2655 return operand_equal_p (TREE_OPERAND (arg0, 0), 2656 TREE_OPERAND (arg1, 1), flags) 2657 && operand_equal_p (TREE_OPERAND (arg0, 1), 2658 TREE_OPERAND (arg1, 0), flags); 2659 } 2660 2661 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2662 /* This is needed for conversions and for COMPONENT_REF. 2663 Might as well play it safe and always test this. */ 2664 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK 2665 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK 2666 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))) 2667 return 0; 2668 2669 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. 2670 We don't care about side effects in that case because the SAVE_EXPR 2671 takes care of that for us. In all other cases, two expressions are 2672 equal if they have no side effects. If we have two identical 2673 expressions with side effects that should be treated the same due 2674 to the only side effects being identical SAVE_EXPR's, that will 2675 be detected in the recursive calls below. */ 2676 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) 2677 && (TREE_CODE (arg0) == SAVE_EXPR 2678 || (! TREE_SIDE_EFFECTS (arg0) && ! 
TREE_SIDE_EFFECTS (arg1)))) 2679 return 1; 2680 2681 /* Next handle constant cases, those for which we can return 1 even 2682 if ONLY_CONST is set. */ 2683 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)) 2684 switch (TREE_CODE (arg0)) 2685 { 2686 case INTEGER_CST: 2687 return (! TREE_CONSTANT_OVERFLOW (arg0) 2688 && ! TREE_CONSTANT_OVERFLOW (arg1) 2689 && tree_int_cst_equal (arg0, arg1)); 2690 2691 case REAL_CST: 2692 return (! TREE_CONSTANT_OVERFLOW (arg0) 2693 && ! TREE_CONSTANT_OVERFLOW (arg1) 2694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), 2695 TREE_REAL_CST (arg1))); 2696 2697 case VECTOR_CST: 2698 { 2699 tree v1, v2; 2700 2701 if (TREE_CONSTANT_OVERFLOW (arg0) 2702 || TREE_CONSTANT_OVERFLOW (arg1)) 2703 return 0; 2704 2705 v1 = TREE_VECTOR_CST_ELTS (arg0); 2706 v2 = TREE_VECTOR_CST_ELTS (arg1); 2707 while (v1 && v2) 2708 { 2709 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 2710 flags)) 2711 return 0; 2712 v1 = TREE_CHAIN (v1); 2713 v2 = TREE_CHAIN (v2); 2714 } 2715 2716 return v1 == v2; 2717 } 2718 2719 case COMPLEX_CST: 2720 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), 2721 flags) 2722 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1), 2723 flags)); 2724 2725 case STRING_CST: 2726 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1) 2727 && ! memcmp (TREE_STRING_POINTER (arg0), 2728 TREE_STRING_POINTER (arg1), 2729 TREE_STRING_LENGTH (arg0))); 2730 2731 case ADDR_EXPR: 2732 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 2733 0); 2734 default: 2735 break; 2736 } 2737 2738 if (flags & OEP_ONLY_CONST) 2739 return 0; 2740 2741/* Define macros to test an operand from arg0 and arg1 for equality and a 2742 variant that allows null and views null as being different from any 2743 non-null value. In the latter case, if either is null, the both 2744 must be; otherwise, do the normal comparison. */ 2745#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \ 2746 TREE_OPERAND (arg1, N), flags) 2747 2748#define OP_SAME_WITH_NULL(N) \ 2749 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \ 2750 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N)) 2751 2752 switch (TREE_CODE_CLASS (TREE_CODE (arg0))) 2753 { 2754 case tcc_unary: 2755 /* Two conversions are equal only if signedness and modes match. */ 2756 switch (TREE_CODE (arg0)) 2757 { 2758 case NOP_EXPR: 2759 case CONVERT_EXPR: 2760 case FIX_CEIL_EXPR: 2761 case FIX_TRUNC_EXPR: 2762 case FIX_FLOOR_EXPR: 2763 case FIX_ROUND_EXPR: 2764 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) 2765 != TYPE_UNSIGNED (TREE_TYPE (arg1))) 2766 return 0; 2767 break; 2768 default: 2769 break; 2770 } 2771 2772 return OP_SAME (0); 2773 2774 2775 case tcc_comparison: 2776 case tcc_binary: 2777 if (OP_SAME (0) && OP_SAME (1)) 2778 return 1; 2779 2780 /* For commutative ops, allow the other order. */ 2781 return (commutative_tree_code (TREE_CODE (arg0)) 2782 && operand_equal_p (TREE_OPERAND (arg0, 0), 2783 TREE_OPERAND (arg1, 1), flags) 2784 && operand_equal_p (TREE_OPERAND (arg0, 1), 2785 TREE_OPERAND (arg1, 0), flags)); 2786 2787 case tcc_reference: 2788 /* If either of the pointer (or reference) expressions we are 2789 dereferencing contain a side effect, these cannot be equal. 
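   For example, two textually identical occurrences of *p++ must never be
   treated as equal, because each evaluation advances p.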
*/ 2790 if (TREE_SIDE_EFFECTS (arg0) 2791 || TREE_SIDE_EFFECTS (arg1)) 2792 return 0; 2793 2794 switch (TREE_CODE (arg0)) 2795 { 2796 case INDIRECT_REF: 2797 case ALIGN_INDIRECT_REF: 2798 case MISALIGNED_INDIRECT_REF: 2799 case REALPART_EXPR: 2800 case IMAGPART_EXPR: 2801 return OP_SAME (0); 2802 2803 case ARRAY_REF: 2804 case ARRAY_RANGE_REF: 2805 /* Operands 2 and 3 may be null. 2806 Compare the array index by value if it is constant first as we 2807 may have different types but same value here. */ 2808 return (OP_SAME (0) 2809 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1), 2810 TREE_OPERAND (arg1, 1)) 2811 || OP_SAME (1)) 2812 && OP_SAME_WITH_NULL (2) 2813 && OP_SAME_WITH_NULL (3)); 2814 2815 case COMPONENT_REF: 2816 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 2817 may be NULL when we're called to compare MEM_EXPRs. */ 2818 return OP_SAME_WITH_NULL (0) 2819 && OP_SAME (1) 2820 && OP_SAME_WITH_NULL (2); 2821 2822 case BIT_FIELD_REF: 2823 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 2824 2825 default: 2826 return 0; 2827 } 2828 2829 case tcc_expression: 2830 switch (TREE_CODE (arg0)) 2831 { 2832 case ADDR_EXPR: 2833 case TRUTH_NOT_EXPR: 2834 return OP_SAME (0); 2835 2836 case TRUTH_ANDIF_EXPR: 2837 case TRUTH_ORIF_EXPR: 2838 return OP_SAME (0) && OP_SAME (1); 2839 2840 case TRUTH_AND_EXPR: 2841 case TRUTH_OR_EXPR: 2842 case TRUTH_XOR_EXPR: 2843 if (OP_SAME (0) && OP_SAME (1)) 2844 return 1; 2845 2846 /* Otherwise take into account this is a commutative operation. */ 2847 return (operand_equal_p (TREE_OPERAND (arg0, 0), 2848 TREE_OPERAND (arg1, 1), flags) 2849 && operand_equal_p (TREE_OPERAND (arg0, 1), 2850 TREE_OPERAND (arg1, 0), flags)); 2851 2852 case CALL_EXPR: 2853 /* If the CALL_EXPRs call different functions, then they 2854 clearly can not be equal. */ 2855 if (!OP_SAME (0)) 2856 return 0; 2857 2858 { 2859 unsigned int cef = call_expr_flags (arg0); 2860 if (flags & OEP_PURE_SAME) 2861 cef &= ECF_CONST | ECF_PURE; 2862 else 2863 cef &= ECF_CONST; 2864 if (!cef) 2865 return 0; 2866 } 2867 2868 /* Now see if all the arguments are the same. operand_equal_p 2869 does not handle TREE_LIST, so we walk the operands here 2870 feeding them to operand_equal_p. */ 2871 arg0 = TREE_OPERAND (arg0, 1); 2872 arg1 = TREE_OPERAND (arg1, 1); 2873 while (arg0 && arg1) 2874 { 2875 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 2876 flags)) 2877 return 0; 2878 2879 arg0 = TREE_CHAIN (arg0); 2880 arg1 = TREE_CHAIN (arg1); 2881 } 2882 2883 /* If we get here and both argument lists are exhausted 2884 then the CALL_EXPRs are equal. */ 2885 return ! (arg0 || arg1); 2886 2887 default: 2888 return 0; 2889 } 2890 2891 case tcc_declaration: 2892 /* Consider __builtin_sqrt equal to sqrt. */ 2893 return (TREE_CODE (arg0) == FUNCTION_DECL 2894 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) 2895 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) 2896 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); 2897 2898 default: 2899 return 0; 2900 } 2901 2902#undef OP_SAME 2903#undef OP_SAME_WITH_NULL 2904} 2905 2906/* Similar to operand_equal_p, but see if ARG0 might have been made by 2907 shorten_compare from ARG1 when ARG1 was being compared with OTHER. 2908 2909 When in doubt, return 0. */ 2910 2911static int 2912operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) 2913{ 2914 int unsignedp1, unsignedpo; 2915 tree primarg0, primarg1, primother; 2916 unsigned int correct_width; 2917 2918 if (operand_equal_p (arg0, arg1, 0)) 2919 return 1; 2920 2921 if (! 
INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 2922 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) 2923 return 0; 2924 2925 /* Discard any conversions that don't change the modes of ARG0 and ARG1 2926 and see if the inner values are the same. This removes any 2927 signedness comparison, which doesn't matter here. */ 2928 primarg0 = arg0, primarg1 = arg1; 2929 STRIP_NOPS (primarg0); 2930 STRIP_NOPS (primarg1); 2931 if (operand_equal_p (primarg0, primarg1, 0)) 2932 return 1; 2933 2934 /* Duplicate what shorten_compare does to ARG1 and see if that gives the 2935 actual comparison operand, ARG0. 2936 2937 First throw away any conversions to wider types 2938 already present in the operands. */ 2939 2940 primarg1 = get_narrower (arg1, &unsignedp1); 2941 primother = get_narrower (other, &unsignedpo); 2942 2943 correct_width = TYPE_PRECISION (TREE_TYPE (arg1)); 2944 if (unsignedp1 == unsignedpo 2945 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width 2946 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width) 2947 { 2948 tree type = TREE_TYPE (arg0); 2949 2950 /* Make sure shorter operand is extended the right way 2951 to match the longer operand. */ 2952 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type 2953 (unsignedp1, TREE_TYPE (primarg1)), primarg1); 2954 2955 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) 2956 return 1; 2957 } 2958 2959 return 0; 2960} 2961 2962/* See if ARG is an expression that is either a comparison or is performing 2963 arithmetic on comparisons. The comparisons must only be comparing 2964 two different values, which will be stored in *CVAL1 and *CVAL2; if 2965 they are nonzero it means that some operands have already been found. 2966 No variables may be used anywhere else in the expression except in the 2967 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around 2968 the expression and save_expr needs to be called with CVAL1 and CVAL2. 2969 2970 If this is true, return 1. Otherwise, return zero. */ 2971 2972static int 2973twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p) 2974{ 2975 enum tree_code code = TREE_CODE (arg); 2976 enum tree_code_class class = TREE_CODE_CLASS (code); 2977 2978 /* We can handle some of the tcc_expression cases here. */ 2979 if (class == tcc_expression && code == TRUTH_NOT_EXPR) 2980 class = tcc_unary; 2981 else if (class == tcc_expression 2982 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR 2983 || code == COMPOUND_EXPR)) 2984 class = tcc_binary; 2985 2986 else if (class == tcc_expression && code == SAVE_EXPR 2987 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0))) 2988 { 2989 /* If we've already found a CVAL1 or CVAL2, this expression is 2990 two complex to handle. 
*/ 2991 if (*cval1 || *cval2) 2992 return 0; 2993 2994 class = tcc_unary; 2995 *save_p = 1; 2996 } 2997 2998 switch (class) 2999 { 3000 case tcc_unary: 3001 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p); 3002 3003 case tcc_binary: 3004 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p) 3005 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3006 cval1, cval2, save_p)); 3007 3008 case tcc_constant: 3009 return 1; 3010 3011 case tcc_expression: 3012 if (code == COND_EXPR) 3013 return (twoval_comparison_p (TREE_OPERAND (arg, 0), 3014 cval1, cval2, save_p) 3015 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3016 cval1, cval2, save_p) 3017 && twoval_comparison_p (TREE_OPERAND (arg, 2), 3018 cval1, cval2, save_p)); 3019 return 0; 3020 3021 case tcc_comparison: 3022 /* First see if we can handle the first operand, then the second. For 3023 the second operand, we know *CVAL1 can't be zero. It must be that 3024 one side of the comparison is each of the values; test for the 3025 case where this isn't true by failing if the two operands 3026 are the same. */ 3027 3028 if (operand_equal_p (TREE_OPERAND (arg, 0), 3029 TREE_OPERAND (arg, 1), 0)) 3030 return 0; 3031 3032 if (*cval1 == 0) 3033 *cval1 = TREE_OPERAND (arg, 0); 3034 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0)) 3035 ; 3036 else if (*cval2 == 0) 3037 *cval2 = TREE_OPERAND (arg, 0); 3038 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0)) 3039 ; 3040 else 3041 return 0; 3042 3043 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0)) 3044 ; 3045 else if (*cval2 == 0) 3046 *cval2 = TREE_OPERAND (arg, 1); 3047 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0)) 3048 ; 3049 else 3050 return 0; 3051 3052 return 1; 3053 3054 default: 3055 return 0; 3056 } 3057} 3058 3059/* ARG is a tree that is known to contain just arithmetic operations and 3060 comparisons. Evaluate the operations in the tree substituting NEW0 for 3061 any occurrence of OLD0 as an operand of a comparison and likewise for 3062 NEW1 and OLD1. */ 3063 3064static tree 3065eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1) 3066{ 3067 tree type = TREE_TYPE (arg); 3068 enum tree_code code = TREE_CODE (arg); 3069 enum tree_code_class class = TREE_CODE_CLASS (code); 3070 3071 /* We can handle some of the tcc_expression cases here. */ 3072 if (class == tcc_expression && code == TRUTH_NOT_EXPR) 3073 class = tcc_unary; 3074 else if (class == tcc_expression 3075 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 3076 class = tcc_binary; 3077 3078 switch (class) 3079 { 3080 case tcc_unary: 3081 return fold_build1 (code, type, 3082 eval_subst (TREE_OPERAND (arg, 0), 3083 old0, new0, old1, new1)); 3084 3085 case tcc_binary: 3086 return fold_build2 (code, type, 3087 eval_subst (TREE_OPERAND (arg, 0), 3088 old0, new0, old1, new1), 3089 eval_subst (TREE_OPERAND (arg, 1), 3090 old0, new0, old1, new1)); 3091 3092 case tcc_expression: 3093 switch (code) 3094 { 3095 case SAVE_EXPR: 3096 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1); 3097 3098 case COMPOUND_EXPR: 3099 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1); 3100 3101 case COND_EXPR: 3102 return fold_build3 (code, type, 3103 eval_subst (TREE_OPERAND (arg, 0), 3104 old0, new0, old1, new1), 3105 eval_subst (TREE_OPERAND (arg, 1), 3106 old0, new0, old1, new1), 3107 eval_subst (TREE_OPERAND (arg, 2), 3108 old0, new0, old1, new1)); 3109 default: 3110 break; 3111 } 3112 /* Fall through - ??? 
*/ 3113 3114 case tcc_comparison: 3115 { 3116 tree arg0 = TREE_OPERAND (arg, 0); 3117 tree arg1 = TREE_OPERAND (arg, 1); 3118 3119 /* We need to check both for exact equality and tree equality. The 3120 former will be true if the operand has a side-effect. In that 3121 case, we know the operand occurred exactly once. */ 3122 3123 if (arg0 == old0 || operand_equal_p (arg0, old0, 0)) 3124 arg0 = new0; 3125 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0)) 3126 arg0 = new1; 3127 3128 if (arg1 == old0 || operand_equal_p (arg1, old0, 0)) 3129 arg1 = new0; 3130 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) 3131 arg1 = new1; 3132 3133 return fold_build2 (code, type, arg0, arg1); 3134 } 3135 3136 default: 3137 return arg; 3138 } 3139} 3140 3141/* Return a tree for the case when the result of an expression is RESULT 3142 converted to TYPE and OMITTED was previously an operand of the expression 3143 but is now not needed (e.g., we folded OMITTED * 0). 3144 3145 If OMITTED has side effects, we must evaluate it. Otherwise, just do 3146 the conversion of RESULT to TYPE. */ 3147 3148tree 3149omit_one_operand (tree type, tree result, tree omitted) 3150{ 3151 tree t = fold_convert (type, result); 3152 3153 if (TREE_SIDE_EFFECTS (omitted)) 3154 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); 3155 3156 return non_lvalue (t); 3157} 3158 3159/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ 3160 3161static tree 3162pedantic_omit_one_operand (tree type, tree result, tree omitted) 3163{ 3164 tree t = fold_convert (type, result); 3165 3166 if (TREE_SIDE_EFFECTS (omitted)) 3167 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); 3168 3169 return pedantic_non_lvalue (t); 3170} 3171 3172/* Return a tree for the case when the result of an expression is RESULT 3173 converted to TYPE and OMITTED1 and OMITTED2 were previously operands 3174 of the expression but are now not needed. 3175 3176 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. 3177 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is 3178 evaluated before OMITTED2. Otherwise, if neither has side effects, 3179 just do the conversion of RESULT to TYPE. */ 3180 3181tree 3182omit_two_operands (tree type, tree result, tree omitted1, tree omitted2) 3183{ 3184 tree t = fold_convert (type, result); 3185 3186 if (TREE_SIDE_EFFECTS (omitted2)) 3187 t = build2 (COMPOUND_EXPR, type, omitted2, t); 3188 if (TREE_SIDE_EFFECTS (omitted1)) 3189 t = build2 (COMPOUND_EXPR, type, omitted1, t); 3190 3191 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t; 3192} 3193 3194 3195/* Return a simplified tree node for the truth-negation of ARG. This 3196 never alters ARG itself. We assume that ARG is an operation that 3197 returns a truth value (0 or 1). 3198 3199 FIXME: one would think we would fold the result, but it causes 3200 problems with the dominator optimizer. */ 3201 3202tree 3203fold_truth_not_expr (tree arg) 3204{ 3205 tree type = TREE_TYPE (arg); 3206 enum tree_code code = TREE_CODE (arg); 3207 3208 /* If this is a comparison, we can simply invert it, except for 3209 floating-point non-equality comparisons, in which case we just 3210 enclose a TRUTH_NOT_EXPR around what we have. 
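   For example, !(a > b) becomes a <= b for integer operands and UNLE (a, b)
   when NaNs are honored; for floating-point under -ftrapping-math we instead
   leave the comparison intact inside a TRUTH_NOT_EXPR.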
*/ 3211 3212 if (TREE_CODE_CLASS (code) == tcc_comparison) 3213 { 3214 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0)); 3215 if (FLOAT_TYPE_P (op_type) 3216 && flag_trapping_math 3217 && code != ORDERED_EXPR && code != UNORDERED_EXPR 3218 && code != NE_EXPR && code != EQ_EXPR) 3219 return NULL_TREE; 3220 else 3221 { 3222 code = invert_tree_comparison (code, 3223 HONOR_NANS (TYPE_MODE (op_type))); 3224 if (code == ERROR_MARK) 3225 return NULL_TREE; 3226 else 3227 return build2 (code, type, 3228 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1)); 3229 } 3230 } 3231 3232 switch (code) 3233 { 3234 case INTEGER_CST: 3235 return constant_boolean_node (integer_zerop (arg), type); 3236 3237 case TRUTH_AND_EXPR: 3238 return build2 (TRUTH_OR_EXPR, type, 3239 invert_truthvalue (TREE_OPERAND (arg, 0)), 3240 invert_truthvalue (TREE_OPERAND (arg, 1))); 3241 3242 case TRUTH_OR_EXPR: 3243 return build2 (TRUTH_AND_EXPR, type, 3244 invert_truthvalue (TREE_OPERAND (arg, 0)), 3245 invert_truthvalue (TREE_OPERAND (arg, 1))); 3246 3247 case TRUTH_XOR_EXPR: 3248 /* Here we can invert either operand. We invert the first operand 3249 unless the second operand is a TRUTH_NOT_EXPR in which case our 3250 result is the XOR of the first operand with the inside of the 3251 negation of the second operand. */ 3252 3253 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR) 3254 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0), 3255 TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); 3256 else 3257 return build2 (TRUTH_XOR_EXPR, type, 3258 invert_truthvalue (TREE_OPERAND (arg, 0)), 3259 TREE_OPERAND (arg, 1)); 3260 3261 case TRUTH_ANDIF_EXPR: 3262 return build2 (TRUTH_ORIF_EXPR, type, 3263 invert_truthvalue (TREE_OPERAND (arg, 0)), 3264 invert_truthvalue (TREE_OPERAND (arg, 1))); 3265 3266 case TRUTH_ORIF_EXPR: 3267 return build2 (TRUTH_ANDIF_EXPR, type, 3268 invert_truthvalue (TREE_OPERAND (arg, 0)), 3269 invert_truthvalue (TREE_OPERAND (arg, 1))); 3270 3271 case TRUTH_NOT_EXPR: 3272 return TREE_OPERAND (arg, 0); 3273 3274 case COND_EXPR: 3275 { 3276 tree arg1 = TREE_OPERAND (arg, 1); 3277 tree arg2 = TREE_OPERAND (arg, 2); 3278 /* A COND_EXPR may have a throw as one operand, which 3279 then has void type. Just leave void operands 3280 as they are. */ 3281 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0), 3282 VOID_TYPE_P (TREE_TYPE (arg1)) 3283 ? arg1 : invert_truthvalue (arg1), 3284 VOID_TYPE_P (TREE_TYPE (arg2)) 3285 ? arg2 : invert_truthvalue (arg2)); 3286 } 3287 3288 case COMPOUND_EXPR: 3289 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0), 3290 invert_truthvalue (TREE_OPERAND (arg, 1))); 3291 3292 case NON_LVALUE_EXPR: 3293 return invert_truthvalue (TREE_OPERAND (arg, 0)); 3294 3295 case NOP_EXPR: 3296 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) 3297 return build1 (TRUTH_NOT_EXPR, type, arg); 3298 3299 case CONVERT_EXPR: 3300 case FLOAT_EXPR: 3301 return build1 (TREE_CODE (arg), type, 3302 invert_truthvalue (TREE_OPERAND (arg, 0))); 3303 3304 case BIT_AND_EXPR: 3305 if (!integer_onep (TREE_OPERAND (arg, 1))) 3306 break; 3307 return build2 (EQ_EXPR, type, arg, 3308 build_int_cst (type, 0)); 3309 3310 case SAVE_EXPR: 3311 return build1 (TRUTH_NOT_EXPR, type, arg); 3312 3313 case CLEANUP_POINT_EXPR: 3314 return build1 (CLEANUP_POINT_EXPR, type, 3315 invert_truthvalue (TREE_OPERAND (arg, 0))); 3316 3317 default: 3318 break; 3319 } 3320 3321 return NULL_TREE; 3322} 3323 3324/* Return a simplified tree node for the truth-negation of ARG. This 3325 never alters ARG itself. 
We assume that ARG is an operation that 3326 returns a truth value (0 or 1). 3327 3328 FIXME: one would think we would fold the result, but it causes 3329 problems with the dominator optimizer. */ 3330 3331tree 3332invert_truthvalue (tree arg) 3333{ 3334 tree tem; 3335 3336 if (TREE_CODE (arg) == ERROR_MARK) 3337 return arg; 3338 3339 tem = fold_truth_not_expr (arg); 3340 if (!tem) 3341 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); 3342 3343 return tem; 3344} 3345 3346/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both 3347 operands are another bit-wise operation with a common input. If so, 3348 distribute the bit operations to save an operation and possibly two if 3349 constants are involved. For example, convert 3350 (A | B) & (A | C) into A | (B & C) 3351 Further simplification will occur if B and C are constants. 3352 3353 If this optimization cannot be done, 0 will be returned. */ 3354 3355static tree 3356distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1) 3357{ 3358 tree common; 3359 tree left, right; 3360 3361 if (TREE_CODE (arg0) != TREE_CODE (arg1) 3362 || TREE_CODE (arg0) == code 3363 || (TREE_CODE (arg0) != BIT_AND_EXPR 3364 && TREE_CODE (arg0) != BIT_IOR_EXPR)) 3365 return 0; 3366 3367 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) 3368 { 3369 common = TREE_OPERAND (arg0, 0); 3370 left = TREE_OPERAND (arg0, 1); 3371 right = TREE_OPERAND (arg1, 1); 3372 } 3373 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) 3374 { 3375 common = TREE_OPERAND (arg0, 0); 3376 left = TREE_OPERAND (arg0, 1); 3377 right = TREE_OPERAND (arg1, 0); 3378 } 3379 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) 3380 { 3381 common = TREE_OPERAND (arg0, 1); 3382 left = TREE_OPERAND (arg0, 0); 3383 right = TREE_OPERAND (arg1, 1); 3384 } 3385 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) 3386 { 3387 common = TREE_OPERAND (arg0, 1); 3388 left = TREE_OPERAND (arg0, 0); 3389 right = TREE_OPERAND (arg1, 0); 3390 } 3391 else 3392 return 0; 3393 3394 return fold_build2 (TREE_CODE (arg0), type, common, 3395 fold_build2 (code, type, left, right)); 3396} 3397 3398/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation 3399 with code CODE. This optimization is unsafe. */ 3400static tree 3401distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) 3402{ 3403 bool mul0 = TREE_CODE (arg0) == MULT_EXPR; 3404 bool mul1 = TREE_CODE (arg1) == MULT_EXPR; 3405 3406 /* (A / C) +- (B / C) -> (A +- B) / C. */ 3407 if (mul0 == mul1 3408 && operand_equal_p (TREE_OPERAND (arg0, 1), 3409 TREE_OPERAND (arg1, 1), 0)) 3410 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type, 3411 fold_build2 (code, type, 3412 TREE_OPERAND (arg0, 0), 3413 TREE_OPERAND (arg1, 0)), 3414 TREE_OPERAND (arg0, 1)); 3415 3416 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). 
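   For example, x/2.0 + x/4.0 becomes x * 0.75: the two reciprocals 0.5 and
   0.25 are computed and added at compile time.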
*/ 3417 if (operand_equal_p (TREE_OPERAND (arg0, 0), 3418 TREE_OPERAND (arg1, 0), 0) 3419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 3420 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 3421 { 3422 REAL_VALUE_TYPE r0, r1; 3423 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); 3424 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); 3425 if (!mul0) 3426 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); 3427 if (!mul1) 3428 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); 3429 real_arithmetic (&r0, code, &r0, &r1); 3430 return fold_build2 (MULT_EXPR, type, 3431 TREE_OPERAND (arg0, 0), 3432 build_real (type, r0)); 3433 } 3434 3435 return NULL_TREE; 3436} 3437 3438/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER 3439 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ 3440 3441static tree 3442make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos, 3443 int unsignedp) 3444{ 3445 tree result; 3446 3447 if (bitpos == 0) 3448 { 3449 tree size = TYPE_SIZE (TREE_TYPE (inner)); 3450 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) 3451 || POINTER_TYPE_P (TREE_TYPE (inner))) 3452 && host_integerp (size, 0) 3453 && tree_low_cst (size, 0) == bitsize) 3454 return fold_convert (type, inner); 3455 } 3456 3457 result = build3 (BIT_FIELD_REF, type, inner, 3458 size_int (bitsize), bitsize_int (bitpos)); 3459 3460 BIT_FIELD_REF_UNSIGNED (result) = unsignedp; 3461 3462 return result; 3463} 3464 3465/* Optimize a bit-field compare. 3466 3467 There are two cases: First is a compare against a constant and the 3468 second is a comparison of two items where the fields are at the same 3469 bit position relative to the start of a chunk (byte, halfword, word) 3470 large enough to contain it. In these cases we can avoid the shift 3471 implicit in bitfield extractions. 3472 3473 For constants, we emit a compare of the shifted constant with the 3474 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being 3475 compared. For two fields at the same position, we do the ANDs with the 3476 similar mask and compare the result of the ANDs. 3477 3478 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR. 3479 COMPARE_TYPE is the type of the comparison, and LHS and RHS 3480 are the left and right operands of the comparison, respectively. 3481 3482 If the optimization described above can be done, we return the resulting 3483 tree. Otherwise we return zero. */ 3484 3485static tree 3486optimize_bit_field_compare (enum tree_code code, tree compare_type, 3487 tree lhs, tree rhs) 3488{ 3489 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; 3490 tree type = TREE_TYPE (lhs); 3491 tree signed_type, unsigned_type; 3492 int const_p = TREE_CODE (rhs) == INTEGER_CST; 3493 enum machine_mode lmode, rmode, nmode; 3494 int lunsignedp, runsignedp; 3495 int lvolatilep = 0, rvolatilep = 0; 3496 tree linner, rinner = NULL_TREE; 3497 tree mask; 3498 tree offset; 3499 3500 /* Get all the information about the extractions being done. If the bit size 3501 if the same as the size of the underlying object, we aren't doing an 3502 extraction at all and so can do nothing. We also don't want to 3503 do anything if the inner expression is a PLACEHOLDER_EXPR since we 3504 then will no longer be able to replace it. 
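   For example, given struct { int f : 3; } s, the test s.f == 2 is rewritten
   below as a comparison of the constant shifted into the field's position
   against a masked load of the byte, halfword or word containing the field,
   avoiding the shift that a plain bit-field extraction would need.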
*/ 3505 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, 3506 &lunsignedp, &lvolatilep, false); 3507 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 3508 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR) 3509 return 0; 3510 3511 if (!const_p) 3512 { 3513 /* If this is not a constant, we can only do something if bit positions, 3514 sizes, and signedness are the same. */ 3515 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, 3516 &runsignedp, &rvolatilep, false); 3517 3518 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize 3519 || lunsignedp != runsignedp || offset != 0 3520 || TREE_CODE (rinner) == PLACEHOLDER_EXPR) 3521 return 0; 3522 } 3523 3524 /* See if we can find a mode to refer to this field. We should be able to, 3525 but fail if we can't. */ 3526 nmode = get_best_mode (lbitsize, lbitpos, 3527 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) 3528 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), 3529 TYPE_ALIGN (TREE_TYPE (rinner))), 3530 word_mode, lvolatilep || rvolatilep); 3531 if (nmode == VOIDmode) 3532 return 0; 3533 3534 /* Set signed and unsigned types of the precision of this mode for the 3535 shifts below. */ 3536 signed_type = lang_hooks.types.type_for_mode (nmode, 0); 3537 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); 3538 3539 /* Compute the bit position and size for the new reference and our offset 3540 within it. If the new reference is the same size as the original, we 3541 won't optimize anything, so return zero. */ 3542 nbitsize = GET_MODE_BITSIZE (nmode); 3543 nbitpos = lbitpos & ~ (nbitsize - 1); 3544 lbitpos -= nbitpos; 3545 if (nbitsize == lbitsize) 3546 return 0; 3547 3548 if (BYTES_BIG_ENDIAN) 3549 lbitpos = nbitsize - lbitsize - lbitpos; 3550 3551 /* Make the mask to be used against the extracted field. */ 3552 mask = build_int_cst (unsigned_type, -1); 3553 mask = force_fit_type (mask, 0, false, false); 3554 mask = fold_convert (unsigned_type, mask); 3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0); 3556 mask = const_binop (RSHIFT_EXPR, mask, 3557 size_int (nbitsize - lbitsize - lbitpos), 0); 3558 3559 if (! const_p) 3560 /* If not comparing with constant, just rework the comparison 3561 and return. */ 3562 return build2 (code, compare_type, 3563 build2 (BIT_AND_EXPR, unsigned_type, 3564 make_bit_field_ref (linner, unsigned_type, 3565 nbitsize, nbitpos, 1), 3566 mask), 3567 build2 (BIT_AND_EXPR, unsigned_type, 3568 make_bit_field_ref (rinner, unsigned_type, 3569 nbitsize, nbitpos, 1), 3570 mask)); 3571 3572 /* Otherwise, we are handling the constant case. See if the constant is too 3573 big for the field. Warn and return a tree of for 0 (false) if so. We do 3574 this not only for its own sake, but to avoid having to test for this 3575 error case below. If we didn't, we might generate wrong code. 3576 3577 For unsigned fields, the constant shifted right by the field length should 3578 be all zero. For signed fields, the high-order bits should agree with 3579 the sign bit. */ 3580 3581 if (lunsignedp) 3582 { 3583 if (! integer_zerop (const_binop (RSHIFT_EXPR, 3584 fold_convert (unsigned_type, rhs), 3585 size_int (lbitsize), 0))) 3586 { 3587 warning (0, "comparison is always %d due to width of bit-field", 3588 code == NE_EXPR); 3589 return constant_boolean_node (code == NE_EXPR, compare_type); 3590 } 3591 } 3592 else 3593 { 3594 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs), 3595 size_int (lbitsize - 1), 0); 3596 if (! 
integer_zerop (tem) && ! integer_all_onesp (tem)) 3597 { 3598 warning (0, "comparison is always %d due to width of bit-field", 3599 code == NE_EXPR); 3600 return constant_boolean_node (code == NE_EXPR, compare_type); 3601 } 3602 } 3603 3604 /* Single-bit compares should always be against zero. */ 3605 if (lbitsize == 1 && ! integer_zerop (rhs)) 3606 { 3607 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR; 3608 rhs = build_int_cst (type, 0); 3609 } 3610 3611 /* Make a new bitfield reference, shift the constant over the 3612 appropriate number of bits and mask it with the computed mask 3613 (in case this was a signed field). If we changed it, make a new one. */ 3614 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1); 3615 if (lvolatilep) 3616 { 3617 TREE_SIDE_EFFECTS (lhs) = 1; 3618 TREE_THIS_VOLATILE (lhs) = 1; 3619 } 3620 3621 rhs = const_binop (BIT_AND_EXPR, 3622 const_binop (LSHIFT_EXPR, 3623 fold_convert (unsigned_type, rhs), 3624 size_int (lbitpos), 0), 3625 mask, 0); 3626 3627 return build2 (code, compare_type, 3628 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), 3629 rhs); 3630} 3631 3632/* Subroutine for fold_truthop: decode a field reference. 3633 3634 If EXP is a comparison reference, we return the innermost reference. 3635 3636 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is 3637 set to the starting bit number. 3638 3639 If the innermost field can be completely contained in a mode-sized 3640 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode. 3641 3642 *PVOLATILEP is set to 1 if the any expression encountered is volatile; 3643 otherwise it is not changed. 3644 3645 *PUNSIGNEDP is set to the signedness of the field. 3646 3647 *PMASK is set to the mask used. This is either contained in a 3648 BIT_AND_EXPR or derived from the width of the field. 3649 3650 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. 3651 3652 Return 0 if this is not a component reference or is one that we can't 3653 do anything with. */ 3654 3655static tree 3656decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, 3657 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode, 3658 int *punsignedp, int *pvolatilep, 3659 tree *pmask, tree *pand_mask) 3660{ 3661 tree outer_type = 0; 3662 tree and_mask = 0; 3663 tree mask, inner, offset; 3664 tree unsigned_type; 3665 unsigned int precision; 3666 3667 /* All the optimizations using this function assume integer fields. 3668 There are problems with FP fields since the type_for_size call 3669 below can fail for, e.g., XFmode. */ 3670 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp))) 3671 return 0; 3672 3673 /* We are interested in the bare arrangement of bits, so strip everything 3674 that doesn't affect the machine mode. However, record the type of the 3675 outermost expression if it may matter below. 
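   For example, if a signed 8-bit field is accessed through a cast to
   unsigned char, the cast is stripped here, but because it covers the whole
   field its unsignedness is what ends up in *PUNSIGNEDP below.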
*/ 3676 if (TREE_CODE (exp) == NOP_EXPR 3677 || TREE_CODE (exp) == CONVERT_EXPR 3678 || TREE_CODE (exp) == NON_LVALUE_EXPR) 3679 outer_type = TREE_TYPE (exp); 3680 STRIP_NOPS (exp); 3681 3682 if (TREE_CODE (exp) == BIT_AND_EXPR) 3683 { 3684 and_mask = TREE_OPERAND (exp, 1); 3685 exp = TREE_OPERAND (exp, 0); 3686 STRIP_NOPS (exp); STRIP_NOPS (and_mask); 3687 if (TREE_CODE (and_mask) != INTEGER_CST) 3688 return 0; 3689 } 3690 3691 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, 3692 punsignedp, pvolatilep, false); 3693 if ((inner == exp && and_mask == 0) 3694 || *pbitsize < 0 || offset != 0 3695 || TREE_CODE (inner) == PLACEHOLDER_EXPR) 3696 return 0; 3697 3698 /* If the number of bits in the reference is the same as the bitsize of 3699 the outer type, then the outer type gives the signedness. Otherwise 3700 (in case of a small bitfield) the signedness is unchanged. */ 3701 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) 3702 *punsignedp = TYPE_UNSIGNED (outer_type); 3703 3704 /* Compute the mask to access the bitfield. */ 3705 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1); 3706 precision = TYPE_PRECISION (unsigned_type); 3707 3708 mask = build_int_cst (unsigned_type, -1); 3709 mask = force_fit_type (mask, 0, false, false); 3710 3711 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); 3712 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); 3713 3714 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */ 3715 if (and_mask != 0) 3716 mask = fold_build2 (BIT_AND_EXPR, unsigned_type, 3717 fold_convert (unsigned_type, and_mask), mask); 3718 3719 *pmask = mask; 3720 *pand_mask = and_mask; 3721 return inner; 3722} 3723 3724/* Return nonzero if MASK represents a mask of SIZE ones in the low-order 3725 bit positions. */ 3726 3727static int 3728all_ones_mask_p (tree mask, int size) 3729{ 3730 tree type = TREE_TYPE (mask); 3731 unsigned int precision = TYPE_PRECISION (type); 3732 tree tmask; 3733 3734 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1); 3735 tmask = force_fit_type (tmask, 0, false, false); 3736 3737 return 3738 tree_int_cst_equal (mask, 3739 const_binop (RSHIFT_EXPR, 3740 const_binop (LSHIFT_EXPR, tmask, 3741 size_int (precision - size), 3742 0), 3743 size_int (precision - size), 0)); 3744} 3745 3746/* Subroutine for fold: determine if VAL is the INTEGER_CONST that 3747 represents the sign bit of EXP's type. If EXP represents a sign 3748 or zero extension, also test VAL against the unextended type. 3749 The return value is the (sub)expression whose sign bit is VAL, 3750 or NULL_TREE otherwise. */ 3751 3752static tree 3753sign_bit_p (tree exp, tree val) 3754{ 3755 unsigned HOST_WIDE_INT mask_lo, lo; 3756 HOST_WIDE_INT mask_hi, hi; 3757 int width; 3758 tree t; 3759 3760 /* Tree EXP must have an integral type. */ 3761 t = TREE_TYPE (exp); 3762 if (! INTEGRAL_TYPE_P (t)) 3763 return NULL_TREE; 3764 3765 /* Tree VAL must be an integer constant. 
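   For a 32-bit type, VAL then matches the sign bit only if, masked to the
   type's precision, it equals 0x80000000.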
*/ 3766 if (TREE_CODE (val) != INTEGER_CST 3767 || TREE_CONSTANT_OVERFLOW (val)) 3768 return NULL_TREE; 3769 3770 width = TYPE_PRECISION (t); 3771 if (width > HOST_BITS_PER_WIDE_INT) 3772 { 3773 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1); 3774 lo = 0; 3775 3776 mask_hi = ((unsigned HOST_WIDE_INT) -1 3777 >> (2 * HOST_BITS_PER_WIDE_INT - width)); 3778 mask_lo = -1; 3779 } 3780 else 3781 { 3782 hi = 0; 3783 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1); 3784 3785 mask_hi = 0; 3786 mask_lo = ((unsigned HOST_WIDE_INT) -1 3787 >> (HOST_BITS_PER_WIDE_INT - width)); 3788 } 3789 3790 /* We mask off those bits beyond TREE_TYPE (exp) so that we can 3791 treat VAL as if it were unsigned. */ 3792 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi 3793 && (TREE_INT_CST_LOW (val) & mask_lo) == lo) 3794 return exp; 3795 3796 /* Handle extension from a narrower type. */ 3797 if (TREE_CODE (exp) == NOP_EXPR 3798 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) 3799 return sign_bit_p (TREE_OPERAND (exp, 0), val); 3800 3801 return NULL_TREE; 3802} 3803 3804/* Subroutine for fold_truthop: determine if an operand is simple enough 3805 to be evaluated unconditionally. */ 3806 3807static int 3808simple_operand_p (tree exp) 3809{ 3810 /* Strip any conversions that don't change the machine mode. */ 3811 STRIP_NOPS (exp); 3812 3813 return (CONSTANT_CLASS_P (exp) 3814 || TREE_CODE (exp) == SSA_NAME 3815 || (DECL_P (exp) 3816 && ! TREE_ADDRESSABLE (exp) 3817 && ! TREE_THIS_VOLATILE (exp) 3818 && ! DECL_NONLOCAL (exp) 3819 /* Don't regard global variables as simple. They may be 3820 allocated in ways unknown to the compiler (shared memory, 3821 #pragma weak, etc). */ 3822 && ! TREE_PUBLIC (exp) 3823 && ! DECL_EXTERNAL (exp) 3824 /* Loading a static variable is unduly expensive, but global 3825 registers aren't expensive. */ 3826 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); 3827} 3828 3829/* The following functions are subroutines to fold_range_test and allow it to 3830 try to change a logical combination of comparisons into a range test. 3831 3832 For example, both 3833 X == 2 || X == 3 || X == 4 || X == 5 3834 and 3835 X >= 2 && X <= 5 3836 are converted to 3837 (unsigned) (X - 2) <= 3 3838 3839 We describe each set of comparisons as being either inside or outside 3840 a range, using a variable named like IN_P, and then describe the 3841 range with a lower and upper bound. If one of the bounds is omitted, 3842 it represents either the highest or lowest value of the type. 3843 3844 In the comments below, we represent a range by two numbers in brackets 3845 preceded by a "+" to designate being inside that range, or a "-" to 3846 designate being outside that range, so the condition can be inverted by 3847 flipping the prefix. An omitted bound is represented by a "-". For 3848 example, "- [-, 10]" means being outside the range starting at the lowest 3849 possible value and ending at 10, in other words, being greater than 10. 3850 The range "+ [-, -]" is always true and hence the range "- [-, -]" is 3851 always false. 3852 3853 We set up things so that the missing bounds are handled in a consistent 3854 manner so neither a missing bound nor "true" and "false" need to be 3855 handled using a special case. */ 3856 3857/* Return the result of applying CODE to ARG0 and ARG1, but handle the case 3858 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P 3859 and UPPER1_P are nonzero if the respective argument is an upper bound 3860 and zero for a lower. 
TYPE, if nonzero, is the type of the result; it 3861 must be specified for a comparison. ARG1 will be converted to ARG0's 3862 type if both are specified. */ 3863 3864static tree 3865range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, 3866 tree arg1, int upper1_p) 3867{ 3868 tree tem; 3869 int result; 3870 int sgn0, sgn1; 3871 3872 /* If neither arg represents infinity, do the normal operation. 3873 Else, if not a comparison, return infinity. Else handle the special 3874 comparison rules. Note that most of the cases below won't occur, but 3875 are handled for consistency. */ 3876 3877 if (arg0 != 0 && arg1 != 0) 3878 { 3879 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0), 3880 arg0, fold_convert (TREE_TYPE (arg0), arg1)); 3881 STRIP_NOPS (tem); 3882 return TREE_CODE (tem) == INTEGER_CST ? tem : 0; 3883 } 3884 3885 if (TREE_CODE_CLASS (code) != tcc_comparison) 3886 return 0; 3887 3888 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0 3889 for neither. In real maths, we cannot assume open ended ranges are 3890 the same. But, this is computer arithmetic, where numbers are finite. 3891 We can therefore make the transformation of any unbounded range with 3892 the value Z, Z being greater than any representable number. This permits 3893 us to treat unbounded ranges as equal. */ 3894 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1); 3895 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1); 3896 switch (code) 3897 { 3898 case EQ_EXPR: 3899 result = sgn0 == sgn1; 3900 break; 3901 case NE_EXPR: 3902 result = sgn0 != sgn1; 3903 break; 3904 case LT_EXPR: 3905 result = sgn0 < sgn1; 3906 break; 3907 case LE_EXPR: 3908 result = sgn0 <= sgn1; 3909 break; 3910 case GT_EXPR: 3911 result = sgn0 > sgn1; 3912 break; 3913 case GE_EXPR: 3914 result = sgn0 >= sgn1; 3915 break; 3916 default: 3917 gcc_unreachable (); 3918 } 3919 3920 return constant_boolean_node (result, type); 3921} 3922 3923/* Given EXP, a logical expression, set the range it is testing into 3924 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression 3925 actually being tested. *PLOW and *PHIGH will be made of the same 3926 type as the returned expression. If EXP is not a comparison, we 3927 will most likely not be returning a useful value and range. Set 3928 *STRICT_OVERFLOW_P to true if the return value is only valid 3929 because signed overflow is undefined; otherwise, do not change 3930 *STRICT_OVERFLOW_P. */ 3931 3932static tree 3933make_range (tree exp, int *pin_p, tree *plow, tree *phigh, 3934 bool *strict_overflow_p) 3935{ 3936 enum tree_code code; 3937 tree arg0 = NULL_TREE, arg1 = NULL_TREE; 3938 tree exp_type = NULL_TREE, arg0_type = NULL_TREE; 3939 int in_p, n_in_p; 3940 tree low, high, n_low, n_high; 3941 3942 /* Start with simply saying "EXP != 0" and then look at the code of EXP 3943 and see if we can refine the range. Some of the cases below may not 3944 happen, but it doesn't seem worth worrying about this. We "continue" 3945 the outer loop when we've changed something; otherwise we "break" 3946 the switch, which will "break" the while. 
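For instance, given EXP = (x <= 5) with x unsigned, the first iteration replaces the initial "x != 0" range by + [-, 5], the unsignedness of x then intersects that with + [0, -] to give + [0, 5], and the loop continues with EXP = x.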
*/ 3947 3948 in_p = 0; 3949 low = high = build_int_cst (TREE_TYPE (exp), 0); 3950 3951 while (1) 3952 { 3953 code = TREE_CODE (exp); 3954 exp_type = TREE_TYPE (exp); 3955 3956 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 3957 { 3958 if (TREE_CODE_LENGTH (code) > 0) 3959 arg0 = TREE_OPERAND (exp, 0); 3960 if (TREE_CODE_CLASS (code) == tcc_comparison 3961 || TREE_CODE_CLASS (code) == tcc_unary 3962 || TREE_CODE_CLASS (code) == tcc_binary) 3963 arg0_type = TREE_TYPE (arg0); 3964 if (TREE_CODE_CLASS (code) == tcc_binary 3965 || TREE_CODE_CLASS (code) == tcc_comparison 3966 || (TREE_CODE_CLASS (code) == tcc_expression 3967 && TREE_CODE_LENGTH (code) > 1)) 3968 arg1 = TREE_OPERAND (exp, 1); 3969 } 3970 3971 switch (code) 3972 { 3973 case TRUTH_NOT_EXPR: 3974 in_p = ! in_p, exp = arg0; 3975 continue; 3976 3977 case EQ_EXPR: case NE_EXPR: 3978 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: 3979 /* We can only do something if the range is testing for zero 3980 and if the second operand is an integer constant. Note that 3981 saying something is "in" the range we make is done by 3982 complementing IN_P since it will set in the initial case of 3983 being not equal to zero; "out" is leaving it alone. */ 3984 if (low == 0 || high == 0 3985 || ! integer_zerop (low) || ! integer_zerop (high) 3986 || TREE_CODE (arg1) != INTEGER_CST) 3987 break; 3988 3989 switch (code) 3990 { 3991 case NE_EXPR: /* - [c, c] */ 3992 low = high = arg1; 3993 break; 3994 case EQ_EXPR: /* + [c, c] */ 3995 in_p = ! in_p, low = high = arg1; 3996 break; 3997 case GT_EXPR: /* - [-, c] */ 3998 low = 0, high = arg1; 3999 break; 4000 case GE_EXPR: /* + [c, -] */ 4001 in_p = ! in_p, low = arg1, high = 0; 4002 break; 4003 case LT_EXPR: /* - [c, -] */ 4004 low = arg1, high = 0; 4005 break; 4006 case LE_EXPR: /* + [-, c] */ 4007 in_p = ! in_p, low = 0, high = arg1; 4008 break; 4009 default: 4010 gcc_unreachable (); 4011 } 4012 4013 /* If this is an unsigned comparison, we also know that EXP is 4014 greater than or equal to zero. We base the range tests we make 4015 on that fact, so we record it here so we can parse existing 4016 range tests. We test arg0_type since often the return type 4017 of, e.g. EQ_EXPR, is boolean. */ 4018 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) 4019 { 4020 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4021 in_p, low, high, 1, 4022 build_int_cst (arg0_type, 0), 4023 NULL_TREE)) 4024 break; 4025 4026 in_p = n_in_p, low = n_low, high = n_high; 4027 4028 /* If the high bound is missing, but we have a nonzero low 4029 bound, reverse the range so it goes from zero to the low bound 4030 minus 1. */ 4031 if (high == 0 && low && ! integer_zerop (low)) 4032 { 4033 in_p = ! 
in_p; 4034 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, 4035 integer_one_node, 0); 4036 low = build_int_cst (arg0_type, 0); 4037 } 4038 } 4039 4040 exp = arg0; 4041 continue; 4042 4043 case NEGATE_EXPR: 4044 /* (-x) IN [a,b] -> x in [-b, -a] */ 4045 n_low = range_binop (MINUS_EXPR, exp_type, 4046 build_int_cst (exp_type, 0), 4047 0, high, 1); 4048 n_high = range_binop (MINUS_EXPR, exp_type, 4049 build_int_cst (exp_type, 0), 4050 0, low, 0); 4051 low = n_low, high = n_high; 4052 exp = arg0; 4053 continue; 4054 4055 case BIT_NOT_EXPR: 4056 /* ~ X -> -X - 1 */ 4057 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0), 4058 build_int_cst (exp_type, 1)); 4059 continue; 4060 4061 case PLUS_EXPR: case MINUS_EXPR: 4062 if (TREE_CODE (arg1) != INTEGER_CST) 4063 break; 4064 4065 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot 4066 move a constant to the other side. */ 4067 if (!TYPE_UNSIGNED (arg0_type) 4068 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4069 break; 4070 4071 /* If EXP is signed, any overflow in the computation is undefined, 4072 so we don't worry about it so long as our computations on 4073 the bounds don't overflow. For unsigned, overflow is defined 4074 and this is exactly the right thing. */ 4075 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4076 arg0_type, low, 0, arg1, 0); 4077 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 4078 arg0_type, high, 1, arg1, 0); 4079 if ((n_low != 0 && TREE_OVERFLOW (n_low)) 4080 || (n_high != 0 && TREE_OVERFLOW (n_high))) 4081 break; 4082 4083 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) 4084 *strict_overflow_p = true; 4085 4086 /* Check for an unsigned range which has wrapped around the maximum 4087 value thus making n_high < n_low, and normalize it. */ 4088 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) 4089 { 4090 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, 4091 integer_one_node, 0); 4092 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, 4093 integer_one_node, 0); 4094 4095 /* If the range is of the form +/- [ x+1, x ], we won't 4096 be able to normalize it. But then, it represents the 4097 whole range or the empty set, so make it 4098 +/- [ -, - ]. */ 4099 if (tree_int_cst_equal (n_low, low) 4100 && tree_int_cst_equal (n_high, high)) 4101 low = high = 0; 4102 else 4103 in_p = ! in_p; 4104 } 4105 else 4106 low = n_low, high = n_high; 4107 4108 exp = arg0; 4109 continue; 4110 4111 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR: 4112 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) 4113 break; 4114 4115 if (! INTEGRAL_TYPE_P (arg0_type) 4116 || (low != 0 && ! int_fits_type_p (low, arg0_type)) 4117 || (high != 0 && ! int_fits_type_p (high, arg0_type))) 4118 break; 4119 4120 n_low = low, n_high = high; 4121 4122 if (n_low != 0) 4123 n_low = fold_convert (arg0_type, n_low); 4124 4125 if (n_high != 0) 4126 n_high = fold_convert (arg0_type, n_high); 4127 4128 4129 /* If we're converting arg0 from an unsigned type, to exp, 4130 a signed type, we will be doing the comparison as unsigned. 4131 The tests above have already verified that LOW and HIGH 4132 are both positive. 4133 4134 So we have to ensure that we will handle large unsigned 4135 values the same way that the current signed bounds treat 4136 negative values. 
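For instance, if EXP is (int) u <= 5 with a 32-bit unsigned u, the signed range + [-, 5] must become the unsigned test - [6, 0x7fffffff], since the values 0x80000000 and up are the negative ints that also satisfy the comparison; the merging below produces exactly that combination.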
*/ 4137 4138 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) 4139 { 4140 tree high_positive; 4141 tree equiv_type = lang_hooks.types.type_for_mode 4142 (TYPE_MODE (arg0_type), 1); 4143 4144 /* A range without an upper bound is, naturally, unbounded. 4145 Since convert would have cropped a very large value, use 4146 the max value for the destination type. */ 4147 high_positive 4148 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) 4149 : TYPE_MAX_VALUE (arg0_type); 4150 4151 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) 4152 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type, 4153 fold_convert (arg0_type, 4154 high_positive), 4155 fold_convert (arg0_type, 4156 integer_one_node)); 4157 4158 /* If the low bound is specified, "and" the range with the 4159 range for which the original unsigned value will be 4160 positive. */ 4161 if (low != 0) 4162 { 4163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4164 1, n_low, n_high, 1, 4165 fold_convert (arg0_type, 4166 integer_zero_node), 4167 high_positive)) 4168 break; 4169 4170 in_p = (n_in_p == in_p); 4171 } 4172 else 4173 { 4174 /* Otherwise, "or" the range with the range of the input 4175 that will be interpreted as negative. */ 4176 if (! merge_ranges (&n_in_p, &n_low, &n_high, 4177 0, n_low, n_high, 1, 4178 fold_convert (arg0_type, 4179 integer_zero_node), 4180 high_positive)) 4181 break; 4182 4183 in_p = (in_p != n_in_p); 4184 } 4185 } 4186 4187 exp = arg0; 4188 low = n_low, high = n_high; 4189 continue; 4190 4191 default: 4192 break; 4193 } 4194 4195 break; 4196 } 4197 4198 /* If EXP is a constant, we can evaluate whether this is true or false. */ 4199 if (TREE_CODE (exp) == INTEGER_CST) 4200 { 4201 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node, 4202 exp, 0, low, 0)) 4203 && integer_onep (range_binop (LE_EXPR, integer_type_node, 4204 exp, 1, high, 1))); 4205 low = high = 0; 4206 exp = 0; 4207 } 4208 4209 *pin_p = in_p, *plow = low, *phigh = high; 4210 return exp; 4211} 4212 4213/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result 4214 type, TYPE, return an expression to test if EXP is in (or out of, depending 4215 on IN_P) the range. Return 0 if the test couldn't be created. */ 4216 4217static tree 4218build_range_check (tree type, tree exp, int in_p, tree low, tree high) 4219{ 4220 tree etype = TREE_TYPE (exp); 4221 tree value; 4222 4223#ifdef HAVE_canonicalize_funcptr_for_compare 4224 /* Disable this optimization for function pointer expressions 4225 on targets that require function pointer canonicalization. */ 4226 if (HAVE_canonicalize_funcptr_for_compare 4227 && TREE_CODE (etype) == POINTER_TYPE 4228 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) 4229 return NULL_TREE; 4230#endif 4231 4232 if (! in_p) 4233 { 4234 value = build_range_check (type, exp, 1, low, high); 4235 if (value != 0) 4236 return invert_truthvalue (value); 4237 4238 return 0; 4239 } 4240 4241 if (low == 0 && high == 0) 4242 return build_int_cst (type, 1); 4243 4244 if (low == 0) 4245 return fold_build2 (LE_EXPR, type, exp, 4246 fold_convert (etype, high)); 4247 4248 if (high == 0) 4249 return fold_build2 (GE_EXPR, type, exp, 4250 fold_convert (etype, low)); 4251 4252 if (operand_equal_p (low, high, 0)) 4253 return fold_build2 (EQ_EXPR, type, exp, 4254 fold_convert (etype, low)); 4255 4256 if (integer_zerop (low)) 4257 { 4258 if (! 
TYPE_UNSIGNED (etype)) 4259 { 4260 etype = lang_hooks.types.unsigned_type (etype); 4261 high = fold_convert (etype, high); 4262 exp = fold_convert (etype, exp); 4263 } 4264 return build_range_check (type, exp, 1, 0, high); 4265 } 4266 4267 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */ 4268 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) 4269 { 4270 unsigned HOST_WIDE_INT lo; 4271 HOST_WIDE_INT hi; 4272 int prec; 4273 4274 prec = TYPE_PRECISION (etype); 4275 if (prec <= HOST_BITS_PER_WIDE_INT) 4276 { 4277 hi = 0; 4278 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1; 4279 } 4280 else 4281 { 4282 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1; 4283 lo = (unsigned HOST_WIDE_INT) -1; 4284 } 4285 4286 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo) 4287 { 4288 if (TYPE_UNSIGNED (etype)) 4289 { 4290 etype = lang_hooks.types.signed_type (etype); 4291 exp = fold_convert (etype, exp); 4292 } 4293 return fold_build2 (GT_EXPR, type, exp, 4294 build_int_cst (etype, 0)); 4295 } 4296 } 4297 4298 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). 4299 This requires wrap-around arithmetics for the type of the expression. */ 4300 switch (TREE_CODE (etype)) 4301 { 4302 case INTEGER_TYPE: 4303 /* There is no requirement that LOW be within the range of ETYPE 4304 if the latter is a subtype. It must, however, be within the base 4305 type of ETYPE. So be sure we do the subtraction in that type. */ 4306 if (TREE_TYPE (etype)) 4307 etype = TREE_TYPE (etype); 4308 break; 4309 4310 case ENUMERAL_TYPE: 4311 case BOOLEAN_TYPE: 4312 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 4313 TYPE_UNSIGNED (etype)); 4314 break; 4315 4316 default: 4317 break; 4318 } 4319 4320 /* If we don't have wrap-around arithmetics upfront, try to force it. */ 4321 if (TREE_CODE (etype) == INTEGER_TYPE 4322 && !TYPE_OVERFLOW_WRAPS (etype)) 4323 { 4324 tree utype, minv, maxv; 4325 4326 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN 4327 for the type in question, as we rely on this here. */ 4328 utype = lang_hooks.types.unsigned_type (etype); 4329 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype)); 4330 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, 4331 integer_one_node, 1); 4332 minv = fold_convert (utype, TYPE_MIN_VALUE (etype)); 4333 4334 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, 4335 minv, 1, maxv, 1))) 4336 etype = utype; 4337 else 4338 return 0; 4339 } 4340 4341 high = fold_convert (etype, high); 4342 low = fold_convert (etype, low); 4343 exp = fold_convert (etype, exp); 4344 4345 value = const_binop (MINUS_EXPR, high, low, 0); 4346 4347 if (value != 0 && !TREE_OVERFLOW (value)) 4348 return build_range_check (type, 4349 fold_build2 (MINUS_EXPR, etype, exp, low), 4350 1, build_int_cst (etype, 0), value); 4351 4352 return 0; 4353} 4354 4355/* Return the predecessor of VAL in its type, handling the infinite case. */ 4356 4357static tree 4358range_predecessor (tree val) 4359{ 4360 tree type = TREE_TYPE (val); 4361 4362 if (INTEGRAL_TYPE_P (type) 4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) 4364 return 0; 4365 else 4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 4367} 4368 4369/* Return the successor of VAL in its type, handling the infinite case. 
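For an 8-bit unsigned VAL, for instance, range_successor (255) is 0, standing for "no successor", while range_successor (10) is simply 11.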
*/ 4370 4371static tree 4372range_successor (tree val) 4373{ 4374 tree type = TREE_TYPE (val); 4375 4376 if (INTEGRAL_TYPE_P (type) 4377 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) 4378 return 0; 4379 else 4380 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 4381} 4382 4383/* Given two ranges, see if we can merge them into one. Return 1 if we 4384 can, 0 if we can't. Set the output range into the specified parameters. */ 4385 4386static int 4387merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, 4388 tree high0, int in1_p, tree low1, tree high1) 4389{ 4390 int no_overlap; 4391 int subset; 4392 int temp; 4393 tree tem; 4394 int in_p; 4395 tree low, high; 4396 int lowequal = ((low0 == 0 && low1 == 0) 4397 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4398 low0, 0, low1, 0))); 4399 int highequal = ((high0 == 0 && high1 == 0) 4400 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4401 high0, 1, high1, 1))); 4402 4403 /* Make range 0 be the range that starts first, or ends last if they 4404 start at the same value. Swap them if it isn't. */ 4405 if (integer_onep (range_binop (GT_EXPR, integer_type_node, 4406 low0, 0, low1, 0)) 4407 || (lowequal 4408 && integer_onep (range_binop (GT_EXPR, integer_type_node, 4409 high1, 1, high0, 1)))) 4410 { 4411 temp = in0_p, in0_p = in1_p, in1_p = temp; 4412 tem = low0, low0 = low1, low1 = tem; 4413 tem = high0, high0 = high1, high1 = tem; 4414 } 4415 4416 /* Now flag two cases, whether the ranges are disjoint or whether the 4417 second range is totally subsumed in the first. Note that the tests 4418 below are simplified by the ones above. */ 4419 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node, 4420 high0, 1, low1, 0)); 4421 subset = integer_onep (range_binop (LE_EXPR, integer_type_node, 4422 high1, 1, high0, 1)); 4423 4424 /* We now have four cases, depending on whether we are including or 4425 excluding the two ranges. */ 4426 if (in0_p && in1_p) 4427 { 4428 /* If they don't overlap, the result is false. If the second range 4429 is a subset it is the result. Otherwise, the range is from the start 4430 of the second to the end of the first. */ 4431 if (no_overlap) 4432 in_p = 0, low = high = 0; 4433 else if (subset) 4434 in_p = 1, low = low1, high = high1; 4435 else 4436 in_p = 1, low = low1, high = high0; 4437 } 4438 4439 else if (in0_p && ! in1_p) 4440 { 4441 /* If they don't overlap, the result is the first range. If they are 4442 equal, the result is false. If the second range is a subset of the 4443 first, and the ranges begin at the same place, we go from just after 4444 the end of the second range to the end of the first. If the second 4445 range is not a subset of the first, or if it is a subset and both 4446 ranges end at the same place, the range starts at the start of the 4447 first range and ends just before the second range. 4448 Otherwise, we can't describe this as a single range. */ 4449 if (no_overlap) 4450 in_p = 1, low = low0, high = high0; 4451 else if (lowequal && highequal) 4452 in_p = 0, low = high = 0; 4453 else if (subset && lowequal) 4454 { 4455 low = range_successor (high1); 4456 high = high0; 4457 in_p = 1; 4458 if (low == 0) 4459 { 4460 /* We are in the weird situation where high0 > high1 but 4461 high1 has no successor. Punt. */ 4462 return 0; 4463 } 4464 } 4465 else if (! 
subset || highequal) 4466 { 4467 low = low0; 4468 high = range_predecessor (low1); 4469 in_p = 1; 4470 if (high == 0) 4471 { 4472 /* low0 < low1 but low1 has no predecessor. Punt. */ 4473 return 0; 4474 } 4475 } 4476 else 4477 return 0; 4478 } 4479 4480 else if (! in0_p && in1_p) 4481 { 4482 /* If they don't overlap, the result is the second range. If the second 4483 is a subset of the first, the result is false. Otherwise, 4484 the range starts just after the first range and ends at the 4485 end of the second. */ 4486 if (no_overlap) 4487 in_p = 1, low = low1, high = high1; 4488 else if (subset || highequal) 4489 in_p = 0, low = high = 0; 4490 else 4491 { 4492 low = range_successor (high0); 4493 high = high1; 4494 in_p = 1; 4495 if (low == 0) 4496 { 4497 /* high1 > high0 but high0 has no successor. Punt. */ 4498 return 0; 4499 } 4500 } 4501 } 4502 4503 else 4504 { 4505 /* The case where we are excluding both ranges. Here the complex case 4506 is if they don't overlap. In that case, the only time we have a 4507 range is if they are adjacent. If the second is a subset of the 4508 first, the result is the first. Otherwise, the range to exclude 4509 starts at the beginning of the first range and ends at the end of the 4510 second. */ 4511 if (no_overlap) 4512 { 4513 if (integer_onep (range_binop (EQ_EXPR, integer_type_node, 4514 range_successor (high0), 4515 1, low1, 0))) 4516 in_p = 0, low = low0, high = high1; 4517 else 4518 { 4519 /* Canonicalize - [min, x] into - [-, x]. */ 4520 if (low0 && TREE_CODE (low0) == INTEGER_CST) 4521 switch (TREE_CODE (TREE_TYPE (low0))) 4522 { 4523 case ENUMERAL_TYPE: 4524 if (TYPE_PRECISION (TREE_TYPE (low0)) 4525 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0)))) 4526 break; 4527 /* FALLTHROUGH */ 4528 case INTEGER_TYPE: 4529 if (tree_int_cst_equal (low0, 4530 TYPE_MIN_VALUE (TREE_TYPE (low0)))) 4531 low0 = 0; 4532 break; 4533 case POINTER_TYPE: 4534 if (TYPE_UNSIGNED (TREE_TYPE (low0)) 4535 && integer_zerop (low0)) 4536 low0 = 0; 4537 break; 4538 default: 4539 break; 4540 } 4541 4542 /* Canonicalize - [x, max] into - [x, -]. */ 4543 if (high1 && TREE_CODE (high1) == INTEGER_CST) 4544 switch (TREE_CODE (TREE_TYPE (high1))) 4545 { 4546 case ENUMERAL_TYPE: 4547 if (TYPE_PRECISION (TREE_TYPE (high1)) 4548 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1)))) 4549 break; 4550 /* FALLTHROUGH */ 4551 case INTEGER_TYPE: 4552 if (tree_int_cst_equal (high1, 4553 TYPE_MAX_VALUE (TREE_TYPE (high1)))) 4554 high1 = 0; 4555 break; 4556 case POINTER_TYPE: 4557 if (TYPE_UNSIGNED (TREE_TYPE (high1)) 4558 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, 4559 high1, 1, 4560 integer_one_node, 1))) 4561 high1 = 0; 4562 break; 4563 default: 4564 break; 4565 } 4566 4567 /* The ranges might be also adjacent between the maximum and 4568 minimum values of the given type. For 4569 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y 4570 return + [x + 1, y - 1]. */ 4571 if (low0 == 0 && high1 == 0) 4572 { 4573 low = range_successor (high0); 4574 high = range_predecessor (low1); 4575 if (low == 0 || high == 0) 4576 return 0; 4577 4578 in_p = 1; 4579 } 4580 else 4581 return 0; 4582 } 4583 } 4584 else if (subset) 4585 in_p = 0, low = low0, high = high0; 4586 else 4587 in_p = 0, low = low0, high = high1; 4588 } 4589 4590 *pin_p = in_p, *plow = low, *phigh = high; 4591 return 1; 4592} 4593 4594 4595/* Subroutine of fold, looking inside expressions of the form 4596 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands 4597 of the COND_EXPR. 
This function is being used also to optimize 4598 A op B ? C : A, by reversing the comparison first. 4599 4600 Return a folded expression whose code is not a COND_EXPR 4601 anymore, or NULL_TREE if no folding opportunity is found. */ 4602 4603static tree 4604fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) 4605{ 4606 enum tree_code comp_code = TREE_CODE (arg0); 4607 tree arg00 = TREE_OPERAND (arg0, 0); 4608 tree arg01 = TREE_OPERAND (arg0, 1); 4609 tree arg1_type = TREE_TYPE (arg1); 4610 tree tem; 4611 4612 STRIP_NOPS (arg1); 4613 STRIP_NOPS (arg2); 4614 4615 /* If we have A op 0 ? A : -A, consider applying the following 4616 transformations: 4617 4618 A == 0? A : -A same as -A 4619 A != 0? A : -A same as A 4620 A >= 0? A : -A same as abs (A) 4621 A > 0? A : -A same as abs (A) 4622 A <= 0? A : -A same as -abs (A) 4623 A < 0? A : -A same as -abs (A) 4624 4625 None of these transformations work for modes with signed 4626 zeros. If A is +/-0, the first two transformations will 4627 change the sign of the result (from +0 to -0, or vice 4628 versa). The last four will fix the sign of the result, 4629 even though the original expressions could be positive or 4630 negative, depending on the sign of A. 4631 4632 Note that all these transformations are correct if A is 4633 NaN, since the two alternatives (A and -A) are also NaNs. */ 4634 if ((FLOAT_TYPE_P (TREE_TYPE (arg01)) 4635 ? real_zerop (arg01) 4636 : integer_zerop (arg01)) 4637 && ((TREE_CODE (arg2) == NEGATE_EXPR 4638 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) 4639 /* In the case that A is of the form X-Y, '-A' (arg2) may 4640 have already been folded to Y-X, check for that. */ 4641 || (TREE_CODE (arg1) == MINUS_EXPR 4642 && TREE_CODE (arg2) == MINUS_EXPR 4643 && operand_equal_p (TREE_OPERAND (arg1, 0), 4644 TREE_OPERAND (arg2, 1), 0) 4645 && operand_equal_p (TREE_OPERAND (arg1, 1), 4646 TREE_OPERAND (arg2, 0), 0)))) 4647 switch (comp_code) 4648 { 4649 case EQ_EXPR: 4650 case UNEQ_EXPR: 4651 tem = fold_convert (arg1_type, arg1); 4652 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem))); 4653 case NE_EXPR: 4654 case LTGT_EXPR: 4655 return pedantic_non_lvalue (fold_convert (type, arg1)); 4656 case UNGE_EXPR: 4657 case UNGT_EXPR: 4658 if (flag_trapping_math) 4659 break; 4660 /* Fall through. */ 4661 case GE_EXPR: 4662 case GT_EXPR: 4663 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4664 arg1 = fold_convert (lang_hooks.types.signed_type 4665 (TREE_TYPE (arg1)), arg1); 4666 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1); 4667 return pedantic_non_lvalue (fold_convert (type, tem)); 4668 case UNLE_EXPR: 4669 case UNLT_EXPR: 4670 if (flag_trapping_math) 4671 break; 4672 case LE_EXPR: 4673 case LT_EXPR: 4674 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4675 arg1 = fold_convert (lang_hooks.types.signed_type 4676 (TREE_TYPE (arg1)), arg1); 4677 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1); 4678 return negate_expr (fold_convert (type, tem)); 4679 default: 4680 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 4681 break; 4682 } 4683 4684 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise 4685 A == 0 ? A : 0 is always 0 unless A is -0. Note that 4686 both transformations are correct when A is NaN: A != 0 4687 is then true, and A == 0 is false. 
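(For instance, x != 0 ? x : 0 folds to plain x, and x == 0 ? x : 0 folds to the constant 0.)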
*/ 4688 4689 if (integer_zerop (arg01) && integer_zerop (arg2)) 4690 { 4691 if (comp_code == NE_EXPR) 4692 return pedantic_non_lvalue (fold_convert (type, arg1)); 4693 else if (comp_code == EQ_EXPR) 4694 return build_int_cst (type, 0); 4695 } 4696 4697 /* Try some transformations of A op B ? A : B. 4698 4699 A == B? A : B same as B 4700 A != B? A : B same as A 4701 A >= B? A : B same as max (A, B) 4702 A > B? A : B same as max (B, A) 4703 A <= B? A : B same as min (A, B) 4704 A < B? A : B same as min (B, A) 4705 4706 As above, these transformations don't work in the presence 4707 of signed zeros. For example, if A and B are zeros of 4708 opposite sign, the first two transformations will change 4709 the sign of the result. In the last four, the original 4710 expressions give different results for (A=+0, B=-0) and 4711 (A=-0, B=+0), but the transformed expressions do not. 4712 4713 The first two transformations are correct if either A or B 4714 is a NaN. In the first transformation, the condition will 4715 be false, and B will indeed be chosen. In the case of the 4716 second transformation, the condition A != B will be true, 4717 and A will be chosen. 4718 4719 The conversions to max() and min() are not correct if B is 4720 a number and A is not. The conditions in the original 4721 expressions will be false, so all four give B. The min() 4722 and max() versions would give a NaN instead. */ 4723 if (operand_equal_for_comparison_p (arg01, arg2, arg00) 4724 /* Avoid these transformations if the COND_EXPR may be used 4725 as an lvalue in the C++ front-end. PR c++/19199. */ 4726 && (in_gimple_form 4727 || (strcmp (lang_hooks.name, "GNU C++") != 0 4728 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) 4729 || ! maybe_lvalue_p (arg1) 4730 || ! maybe_lvalue_p (arg2))) 4731 { 4732 tree comp_op0 = arg00; 4733 tree comp_op1 = arg01; 4734 tree comp_type = TREE_TYPE (comp_op0); 4735 4736 /* Avoid adding NOP_EXPRs in case this is an lvalue. */ 4737 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type)) 4738 { 4739 comp_type = type; 4740 comp_op0 = arg1; 4741 comp_op1 = arg2; 4742 } 4743 4744 switch (comp_code) 4745 { 4746 case EQ_EXPR: 4747 return pedantic_non_lvalue (fold_convert (type, arg2)); 4748 case NE_EXPR: 4749 return pedantic_non_lvalue (fold_convert (type, arg1)); 4750 case LE_EXPR: 4751 case LT_EXPR: 4752 case UNLE_EXPR: 4753 case UNLT_EXPR: 4754 /* In C++ a ?: expression can be an lvalue, so put the 4755 operand which will be used if they are equal first 4756 so that we can convert this back to the 4757 corresponding COND_EXPR. */ 4758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4759 { 4760 comp_op0 = fold_convert (comp_type, comp_op0); 4761 comp_op1 = fold_convert (comp_type, comp_op1); 4762 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) 4763 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1) 4764 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0); 4765 return pedantic_non_lvalue (fold_convert (type, tem)); 4766 } 4767 break; 4768 case GE_EXPR: 4769 case GT_EXPR: 4770 case UNGE_EXPR: 4771 case UNGT_EXPR: 4772 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4773 { 4774 comp_op0 = fold_convert (comp_type, comp_op0); 4775 comp_op1 = fold_convert (comp_type, comp_op1); 4776 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) 4777 ? 
fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1) 4778 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0); 4779 return pedantic_non_lvalue (fold_convert (type, tem)); 4780 } 4781 break; 4782 case UNEQ_EXPR: 4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4784 return pedantic_non_lvalue (fold_convert (type, arg2)); 4785 break; 4786 case LTGT_EXPR: 4787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4788 return pedantic_non_lvalue (fold_convert (type, arg1)); 4789 break; 4790 default: 4791 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 4792 break; 4793 } 4794 } 4795 4796 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers, 4797 we might still be able to simplify this. For example, 4798 if C1 is one less or one more than C2, this might have started 4799 out as a MIN or MAX and been transformed by this function. 4800 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */ 4801 4802 if (INTEGRAL_TYPE_P (type) 4803 && TREE_CODE (arg01) == INTEGER_CST 4804 && TREE_CODE (arg2) == INTEGER_CST) 4805 switch (comp_code) 4806 { 4807 case EQ_EXPR: 4808 /* We can replace A with C1 in this case. */ 4809 arg1 = fold_convert (type, arg01); 4810 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2); 4811 4812 case LT_EXPR: 4813 /* If C1 is C2 + 1, this is min(A, C2). */ 4814 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 4815 OEP_ONLY_CONST) 4816 && operand_equal_p (arg01, 4817 const_binop (PLUS_EXPR, arg2, 4818 integer_one_node, 0), 4819 OEP_ONLY_CONST)) 4820 return pedantic_non_lvalue (fold_build2 (MIN_EXPR, 4821 type, arg1, arg2)); 4822 break; 4823 4824 case LE_EXPR: 4825 /* If C1 is C2 - 1, this is min(A, C2). */ 4826 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 4827 OEP_ONLY_CONST) 4828 && operand_equal_p (arg01, 4829 const_binop (MINUS_EXPR, arg2, 4830 integer_one_node, 0), 4831 OEP_ONLY_CONST)) 4832 return pedantic_non_lvalue (fold_build2 (MIN_EXPR, 4833 type, arg1, arg2)); 4834 break; 4835 4836 case GT_EXPR: 4837 /* If C1 is C2 - 1, this is max(A, C2). */ 4838 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 4839 OEP_ONLY_CONST) 4840 && operand_equal_p (arg01, 4841 const_binop (MINUS_EXPR, arg2, 4842 integer_one_node, 0), 4843 OEP_ONLY_CONST)) 4844 return pedantic_non_lvalue (fold_build2 (MAX_EXPR, 4845 type, arg1, arg2)); 4846 break; 4847 4848 case GE_EXPR: 4849 /* If C1 is C2 + 1, this is max(A, C2). */ 4850 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 4851 OEP_ONLY_CONST) 4852 && operand_equal_p (arg01, 4853 const_binop (PLUS_EXPR, arg2, 4854 integer_one_node, 0), 4855 OEP_ONLY_CONST)) 4856 return pedantic_non_lvalue (fold_build2 (MAX_EXPR, 4857 type, arg1, arg2)); 4858 break; 4859 case NE_EXPR: 4860 break; 4861 default: 4862 gcc_unreachable (); 4863 } 4864 4865 return NULL_TREE; 4866} 4867 4868 4869 4870#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT 4871#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2) 4872#endif 4873 4874/* EXP is some logical combination of boolean tests. See if we can 4875 merge it into some range test. Return the new tree if so. 
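For instance, the two comparisons in ch >= '0' && ch <= '9' describe the single range + ['0', '9'], for which build_range_check can then produce something like (unsigned char) (ch - '0') <= 9.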
*/ 4876 4877static tree 4878fold_range_test (enum tree_code code, tree type, tree op0, tree op1) 4879{ 4880 int or_op = (code == TRUTH_ORIF_EXPR 4881 || code == TRUTH_OR_EXPR); 4882 int in0_p, in1_p, in_p; 4883 tree low0, low1, low, high0, high1, high; 4884 bool strict_overflow_p = false; 4885 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); 4886 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); 4887 tree tem; 4888 const char * const warnmsg = G_("assuming signed overflow does not occur " 4889 "when simplifying range test"); 4890 4891 /* If this is an OR operation, invert both sides; we will invert 4892 again at the end. */ 4893 if (or_op) 4894 in0_p = ! in0_p, in1_p = ! in1_p; 4895 4896 /* If both expressions are the same, if we can merge the ranges, and we 4897 can build the range test, return it or it inverted. If one of the 4898 ranges is always true or always false, consider it to be the same 4899 expression as the other. */ 4900 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) 4901 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, 4902 in1_p, low1, high1) 4903 && 0 != (tem = (build_range_check (type, 4904 lhs != 0 ? lhs 4905 : rhs != 0 ? rhs : integer_zero_node, 4906 in_p, low, high)))) 4907 { 4908 if (strict_overflow_p) 4909 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 4910 return or_op ? invert_truthvalue (tem) : tem; 4911 } 4912 4913 /* On machines where the branch cost is expensive, if this is a 4914 short-circuited branch and the underlying object on both sides 4915 is the same, make a non-short-circuit operation. */ 4916 else if (LOGICAL_OP_NON_SHORT_CIRCUIT 4917 && lhs != 0 && rhs != 0 4918 && (code == TRUTH_ANDIF_EXPR 4919 || code == TRUTH_ORIF_EXPR) 4920 && operand_equal_p (lhs, rhs, 0)) 4921 { 4922 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR 4923 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in 4924 which cases we can't do this. */ 4925 if (simple_operand_p (lhs)) 4926 return build2 (code == TRUTH_ANDIF_EXPR 4927 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 4928 type, op0, op1); 4929 4930 else if (lang_hooks.decls.global_bindings_p () == 0 4931 && ! CONTAINS_PLACEHOLDER_P (lhs)) 4932 { 4933 tree common = save_expr (lhs); 4934 4935 if (0 != (lhs = build_range_check (type, common, 4936 or_op ? ! in0_p : in0_p, 4937 low0, high0)) 4938 && (0 != (rhs = build_range_check (type, common, 4939 or_op ? ! in1_p : in1_p, 4940 low1, high1)))) 4941 { 4942 if (strict_overflow_p) 4943 fold_overflow_warning (warnmsg, 4944 WARN_STRICT_OVERFLOW_COMPARISON); 4945 return build2 (code == TRUTH_ANDIF_EXPR 4946 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 4947 type, lhs, rhs); 4948 } 4949 } 4950 } 4951 4952 return 0; 4953} 4954 4955/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P 4956 bit value. Arrange things so the extra bits will be set to zero if and 4957 only if C is signed-extended to its full width. If MASK is nonzero, 4958 it is an INTEGER_CST that should be AND'ed with the extra bits. */ 4959 4960static tree 4961unextend (tree c, int p, int unsignedp, tree mask) 4962{ 4963 tree type = TREE_TYPE (c); 4964 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type)); 4965 tree temp; 4966 4967 if (p == modesize || unsignedp) 4968 return c; 4969 4970 /* We work by getting just the sign bit into the low-order bit, then 4971 into the high-order bit, then sign-extend. We then XOR that value 4972 with C. 
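For instance, with an 8-bit mode and P == 4, and leaving MASK aside, the sign-extended constant 0xfa (the 4-bit value -6) gives a temp of 0xf0, and 0xfa ^ 0xf0 == 0x0a, so the extra bits end up zero; starting instead from 0x0a, which is not sign-extended, the XOR yields 0xfa and the extra bits are nonzero.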
*/ 4973 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0); 4974 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0); 4975 4976 /* We must use a signed type in order to get an arithmetic right shift. 4977 However, we must also avoid introducing accidental overflows, so that 4978 a subsequent call to integer_zerop will work. Hence we must 4979 do the type conversion here. At this point, the constant is either 4980 zero or one, and the conversion to a signed type can never overflow. 4981 We could get an overflow if this conversion is done anywhere else. */ 4982 if (TYPE_UNSIGNED (type)) 4983 temp = fold_convert (lang_hooks.types.signed_type (type), temp); 4984 4985 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0); 4986 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0); 4987 if (mask != 0) 4988 temp = const_binop (BIT_AND_EXPR, temp, 4989 fold_convert (TREE_TYPE (c), mask), 0); 4990 /* If necessary, convert the type back to match the type of C. */ 4991 if (TYPE_UNSIGNED (type)) 4992 temp = fold_convert (type, temp); 4993 4994 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0)); 4995} 4996 4997/* Find ways of folding logical expressions of LHS and RHS: 4998 Try to merge two comparisons to the same innermost item. 4999 Look for range tests like "ch >= '0' && ch <= '9'". 5000 Look for combinations of simple terms on machines with expensive branches 5001 and evaluate the RHS unconditionally. 5002 5003 For example, if we have p->a == 2 && p->b == 4 and we can make an 5004 object large enough to span both A and B, we can do this with a comparison 5005 against the object ANDed with the a mask. 5006 5007 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking 5008 operations to do this with one comparison. 5009 5010 We check for both normal comparisons and the BIT_AND_EXPRs made this by 5011 function and the one above. 5012 5013 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR, 5014 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR. 5015 5016 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its 5017 two operands. 5018 5019 We return the simplified tree or 0 if no optimization is possible. */ 5020 5021static tree 5022fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) 5023{ 5024 /* If this is the "or" of two comparisons, we can do something if 5025 the comparisons are NE_EXPR. If this is the "and", we can do something 5026 if the comparisons are EQ_EXPR. I.e., 5027 (a->b == 2 && a->c == 4) can become (a->new == NEW). 5028 5029 WANTED_CODE is this operation code. For single bit fields, we can 5030 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong" 5031 comparison for one-bit fields. 
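Suppose, for illustration, that a->b occupies bits 0..7 and a->c bits 8..15 of the same word. Then a->b == 2 && a->c == 4 can be checked by fetching those 16 bits once, masking them if needed, and comparing against the merged constant 0x0402.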
*/ 5032 5033 enum tree_code wanted_code; 5034 enum tree_code lcode, rcode; 5035 tree ll_arg, lr_arg, rl_arg, rr_arg; 5036 tree ll_inner, lr_inner, rl_inner, rr_inner; 5037 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos; 5038 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos; 5039 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos; 5040 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos; 5041 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; 5042 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode; 5043 enum machine_mode lnmode, rnmode; 5044 tree ll_mask, lr_mask, rl_mask, rr_mask; 5045 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; 5046 tree l_const, r_const; 5047 tree lntype, rntype, result; 5048 int first_bit, end_bit; 5049 int volatilep; 5050 tree orig_lhs = lhs, orig_rhs = rhs; 5051 enum tree_code orig_code = code; 5052 5053 /* Start by getting the comparison codes. Fail if anything is volatile. 5054 If one operand is a BIT_AND_EXPR with the constant one, treat it as if 5055 it were surrounded with a NE_EXPR. */ 5056 5057 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs)) 5058 return 0; 5059 5060 lcode = TREE_CODE (lhs); 5061 rcode = TREE_CODE (rhs); 5062 5063 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1))) 5064 { 5065 lhs = build2 (NE_EXPR, truth_type, lhs, 5066 build_int_cst (TREE_TYPE (lhs), 0)); 5067 lcode = NE_EXPR; 5068 } 5069 5070 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1))) 5071 { 5072 rhs = build2 (NE_EXPR, truth_type, rhs, 5073 build_int_cst (TREE_TYPE (rhs), 0)); 5074 rcode = NE_EXPR; 5075 } 5076 5077 if (TREE_CODE_CLASS (lcode) != tcc_comparison 5078 || TREE_CODE_CLASS (rcode) != tcc_comparison) 5079 return 0; 5080 5081 ll_arg = TREE_OPERAND (lhs, 0); 5082 lr_arg = TREE_OPERAND (lhs, 1); 5083 rl_arg = TREE_OPERAND (rhs, 0); 5084 rr_arg = TREE_OPERAND (rhs, 1); 5085 5086 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */ 5087 if (simple_operand_p (ll_arg) 5088 && simple_operand_p (lr_arg)) 5089 { 5090 tree result; 5091 if (operand_equal_p (ll_arg, rl_arg, 0) 5092 && operand_equal_p (lr_arg, rr_arg, 0)) 5093 { 5094 result = combine_comparisons (code, lcode, rcode, 5095 truth_type, ll_arg, lr_arg); 5096 if (result) 5097 return result; 5098 } 5099 else if (operand_equal_p (ll_arg, rr_arg, 0) 5100 && operand_equal_p (lr_arg, rl_arg, 0)) 5101 { 5102 result = combine_comparisons (code, lcode, 5103 swap_tree_comparison (rcode), 5104 truth_type, ll_arg, lr_arg); 5105 if (result) 5106 return result; 5107 } 5108 } 5109 5110 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) 5111 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); 5112 5113 /* If the RHS can be evaluated unconditionally and its operands are 5114 simple, it wins to evaluate the RHS unconditionally on machines 5115 with expensive branches. In this case, this isn't a comparison 5116 that can be merged. Avoid doing this if the RHS is a floating-point 5117 comparison since those can trap. */ 5118 5119 if (BRANCH_COST >= 2 5120 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) 5121 && simple_operand_p (rl_arg) 5122 && simple_operand_p (rr_arg)) 5123 { 5124 /* Convert (a != 0) || (b != 0) into (a | b) != 0. 
*/ 5125 if (code == TRUTH_OR_EXPR 5126 && lcode == NE_EXPR && integer_zerop (lr_arg) 5127 && rcode == NE_EXPR && integer_zerop (rr_arg) 5128 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)) 5129 return build2 (NE_EXPR, truth_type, 5130 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5131 ll_arg, rl_arg), 5132 build_int_cst (TREE_TYPE (ll_arg), 0)); 5133 5134 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */ 5135 if (code == TRUTH_AND_EXPR 5136 && lcode == EQ_EXPR && integer_zerop (lr_arg) 5137 && rcode == EQ_EXPR && integer_zerop (rr_arg) 5138 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)) 5139 return build2 (EQ_EXPR, truth_type, 5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5141 ll_arg, rl_arg), 5142 build_int_cst (TREE_TYPE (ll_arg), 0)); 5143 5144 if (LOGICAL_OP_NON_SHORT_CIRCUIT) 5145 { 5146 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs) 5147 return build2 (code, truth_type, lhs, rhs); 5148 return NULL_TREE; 5149 } 5150 } 5151 5152 /* See if the comparisons can be merged. Then get all the parameters for 5153 each side. */ 5154 5155 if ((lcode != EQ_EXPR && lcode != NE_EXPR) 5156 || (rcode != EQ_EXPR && rcode != NE_EXPR)) 5157 return 0; 5158 5159 volatilep = 0; 5160 ll_inner = decode_field_reference (ll_arg, 5161 &ll_bitsize, &ll_bitpos, &ll_mode, 5162 &ll_unsignedp, &volatilep, &ll_mask, 5163 &ll_and_mask); 5164 lr_inner = decode_field_reference (lr_arg, 5165 &lr_bitsize, &lr_bitpos, &lr_mode, 5166 &lr_unsignedp, &volatilep, &lr_mask, 5167 &lr_and_mask); 5168 rl_inner = decode_field_reference (rl_arg, 5169 &rl_bitsize, &rl_bitpos, &rl_mode, 5170 &rl_unsignedp, &volatilep, &rl_mask, 5171 &rl_and_mask); 5172 rr_inner = decode_field_reference (rr_arg, 5173 &rr_bitsize, &rr_bitpos, &rr_mode, 5174 &rr_unsignedp, &volatilep, &rr_mask, 5175 &rr_and_mask); 5176 5177 /* It must be true that the inner operation on the lhs of each 5178 comparison must be the same if we are to be able to do anything. 5179 Then see if we have constants. If not, the same must be true for 5180 the rhs's. */ 5181 if (volatilep || ll_inner == 0 || rl_inner == 0 5182 || ! operand_equal_p (ll_inner, rl_inner, 0)) 5183 return 0; 5184 5185 if (TREE_CODE (lr_arg) == INTEGER_CST 5186 && TREE_CODE (rr_arg) == INTEGER_CST) 5187 l_const = lr_arg, r_const = rr_arg; 5188 else if (lr_inner == 0 || rr_inner == 0 5189 || ! operand_equal_p (lr_inner, rr_inner, 0)) 5190 return 0; 5191 else 5192 l_const = r_const = 0; 5193 5194 /* If either comparison code is not correct for our logical operation, 5195 fail. However, we can convert a one-bit comparison against zero into 5196 the opposite comparison against that bit being set in the field. */ 5197 5198 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR); 5199 if (lcode != wanted_code) 5200 { 5201 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask)) 5202 { 5203 /* Make the left operand unsigned, since we are only interested 5204 in the value of one bit. Otherwise we are doing the wrong 5205 thing below. */ 5206 ll_unsignedp = 1; 5207 l_const = ll_mask; 5208 } 5209 else 5210 return 0; 5211 } 5212 5213 /* This is analogous to the code for l_const above. */ 5214 if (rcode != wanted_code) 5215 { 5216 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask)) 5217 { 5218 rl_unsignedp = 1; 5219 r_const = rl_mask; 5220 } 5221 else 5222 return 0; 5223 } 5224 5225 /* After this point all optimizations will generate bit-field 5226 references, which we might not want. */ 5227 if (! 
lang_hooks.can_use_bit_fields_p ()) 5228 return 0; 5229 5230 /* See if we can find a mode that contains both fields being compared on 5231 the left. If we can't, fail. Otherwise, update all constants and masks 5232 to be relative to a field of that size. */ 5233 first_bit = MIN (ll_bitpos, rl_bitpos); 5234 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); 5235 lnmode = get_best_mode (end_bit - first_bit, first_bit, 5236 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, 5237 volatilep); 5238 if (lnmode == VOIDmode) 5239 return 0; 5240 5241 lnbitsize = GET_MODE_BITSIZE (lnmode); 5242 lnbitpos = first_bit & ~ (lnbitsize - 1); 5243 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); 5244 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; 5245 5246 if (BYTES_BIG_ENDIAN) 5247 { 5248 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; 5249 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; 5250 } 5251 5252 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask), 5253 size_int (xll_bitpos), 0); 5254 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask), 5255 size_int (xrl_bitpos), 0); 5256 5257 if (l_const) 5258 { 5259 l_const = fold_convert (lntype, l_const); 5260 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); 5261 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0); 5262 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const, 5263 fold_build1 (BIT_NOT_EXPR, 5264 lntype, ll_mask), 5265 0))) 5266 { 5267 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5268 5269 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5270 } 5271 } 5272 if (r_const) 5273 { 5274 r_const = fold_convert (lntype, r_const); 5275 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); 5276 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0); 5277 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, 5278 fold_build1 (BIT_NOT_EXPR, 5279 lntype, rl_mask), 5280 0))) 5281 { 5282 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5283 5284 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5285 } 5286 } 5287 5288 /* If the right sides are not constant, do the same for it. Also, 5289 disallow this optimization if a size or signedness mismatch occurs 5290 between the left and right sides. */ 5291 if (l_const == 0) 5292 { 5293 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize 5294 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp 5295 /* Make sure the two fields on the right 5296 correspond to the left without being swapped. 
*/ 5297 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) 5298 return 0; 5299 5300 first_bit = MIN (lr_bitpos, rr_bitpos); 5301 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); 5302 rnmode = get_best_mode (end_bit - first_bit, first_bit, 5303 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, 5304 volatilep); 5305 if (rnmode == VOIDmode) 5306 return 0; 5307 5308 rnbitsize = GET_MODE_BITSIZE (rnmode); 5309 rnbitpos = first_bit & ~ (rnbitsize - 1); 5310 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); 5311 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; 5312 5313 if (BYTES_BIG_ENDIAN) 5314 { 5315 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; 5316 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; 5317 } 5318 5319 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask), 5320 size_int (xlr_bitpos), 0); 5321 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask), 5322 size_int (xrr_bitpos), 0); 5323 5324 /* Make a mask that corresponds to both fields being compared. 5325 Do this for both items being compared. If the operands are the 5326 same size and the bits being compared are in the same position 5327 then we can do this by masking both and comparing the masked 5328 results. */ 5329 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); 5330 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0); 5331 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) 5332 { 5333 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos, 5334 ll_unsignedp || rl_unsignedp); 5335 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5336 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); 5337 5338 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos, 5339 lr_unsignedp || rr_unsignedp); 5340 if (! all_ones_mask_p (lr_mask, rnbitsize)) 5341 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); 5342 5343 return build2 (wanted_code, truth_type, lhs, rhs); 5344 } 5345 5346 /* There is still another way we can do something: If both pairs of 5347 fields being compared are adjacent, we may be able to make a wider 5348 field containing them both. 5349 5350 Note that we still must mask the lhs/rhs expressions. Furthermore, 5351 the mask must be shifted to account for the shift done by 5352 make_bit_field_ref. */ 5353 if ((ll_bitsize + ll_bitpos == rl_bitpos 5354 && lr_bitsize + lr_bitpos == rr_bitpos) 5355 || (ll_bitpos == rl_bitpos + rl_bitsize 5356 && lr_bitpos == rr_bitpos + rr_bitsize)) 5357 { 5358 tree type; 5359 5360 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize, 5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp); 5362 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize, 5363 MIN (lr_bitpos, rr_bitpos), lr_unsignedp); 5364 5365 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, 5366 size_int (MIN (xll_bitpos, xrl_bitpos)), 0); 5367 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, 5368 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0); 5369 5370 /* Convert to the smaller type before masking out unwanted bits. */ 5371 type = lntype; 5372 if (lntype != rntype) 5373 { 5374 if (lnbitsize > rnbitsize) 5375 { 5376 lhs = fold_convert (rntype, lhs); 5377 ll_mask = fold_convert (rntype, ll_mask); 5378 type = rntype; 5379 } 5380 else if (lnbitsize < rnbitsize) 5381 { 5382 rhs = fold_convert (lntype, rhs); 5383 lr_mask = fold_convert (lntype, lr_mask); 5384 type = lntype; 5385 } 5386 } 5387 5388 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize)) 5389 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask); 5390 5391 if (! 
all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize)) 5392 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask); 5393 5394 return build2 (wanted_code, truth_type, lhs, rhs); 5395 } 5396 5397 return 0; 5398 } 5399 5400 /* Handle the case of comparisons with constants. If there is something in 5401 common between the masks, those bits of the constants must be the same. 5402 If not, the condition is always false. Test for this to avoid generating 5403 incorrect code below. */ 5404 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0); 5405 if (! integer_zerop (result) 5406 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0), 5407 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1) 5408 { 5409 if (wanted_code == NE_EXPR) 5410 { 5411 warning (0, "%<or%> of unmatched not-equal tests is always 1"); 5412 return constant_boolean_node (true, truth_type); 5413 } 5414 else 5415 { 5416 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); 5417 return constant_boolean_node (false, truth_type); 5418 } 5419 } 5420 5421 /* Construct the expression we will return. First get the component 5422 reference we will make. Unless the mask is all ones the width of 5423 that field, perform the mask operation. Then compare with the 5424 merged constant. */ 5425 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos, 5426 ll_unsignedp || rl_unsignedp); 5427 5428 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); 5429 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5430 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask); 5431 5432 return build2 (wanted_code, truth_type, result, 5433 const_binop (BIT_IOR_EXPR, l_const, r_const, 0)); 5434} 5435 5436/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a 5437 constant. */ 5438 5439static tree 5440optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) 5441{ 5442 tree arg0 = op0; 5443 enum tree_code op_code; 5444 tree comp_const = op1; 5445 tree minmax_const; 5446 int consts_equal, consts_lt; 5447 tree inner; 5448 5449 STRIP_SIGN_NOPS (arg0); 5450 5451 op_code = TREE_CODE (arg0); 5452 minmax_const = TREE_OPERAND (arg0, 1); 5453 consts_equal = tree_int_cst_equal (minmax_const, comp_const); 5454 consts_lt = tree_int_cst_lt (minmax_const, comp_const); 5455 inner = TREE_OPERAND (arg0, 0); 5456 5457 /* If something does not permit us to optimize, return the original tree. */ 5458 if ((op_code != MIN_EXPR && op_code != MAX_EXPR) 5459 || TREE_CODE (comp_const) != INTEGER_CST 5460 || TREE_CONSTANT_OVERFLOW (comp_const) 5461 || TREE_CODE (minmax_const) != INTEGER_CST 5462 || TREE_CONSTANT_OVERFLOW (minmax_const)) 5463 return NULL_TREE; 5464 5465 /* Now handle all the various comparison codes. We only handle EQ_EXPR 5466 and GT_EXPR, doing the rest with recursive calls using logical 5467 simplifications. 
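For example, the LT_EXPR case is folded by computing the inverted comparison (GE_EXPR) and then inverting the result, and GE_EXPR itself is expressed as the OR of the EQ_EXPR and GT_EXPR cases.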
*/ 5468 switch (code) 5469 { 5470 case NE_EXPR: case LT_EXPR: case LE_EXPR: 5471 { 5472 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false), 5473 type, op0, op1); 5474 if (tem) 5475 return invert_truthvalue (tem); 5476 return NULL_TREE; 5477 } 5478 5479 case GE_EXPR: 5480 return 5481 fold_build2 (TRUTH_ORIF_EXPR, type, 5482 optimize_minmax_comparison 5483 (EQ_EXPR, type, arg0, comp_const), 5484 optimize_minmax_comparison 5485 (GT_EXPR, type, arg0, comp_const)); 5486 5487 case EQ_EXPR: 5488 if (op_code == MAX_EXPR && consts_equal) 5489 /* MAX (X, 0) == 0 -> X <= 0 */ 5490 return fold_build2 (LE_EXPR, type, inner, comp_const); 5491 5492 else if (op_code == MAX_EXPR && consts_lt) 5493 /* MAX (X, 0) == 5 -> X == 5 */ 5494 return fold_build2 (EQ_EXPR, type, inner, comp_const); 5495 5496 else if (op_code == MAX_EXPR) 5497 /* MAX (X, 0) == -1 -> false */ 5498 return omit_one_operand (type, integer_zero_node, inner); 5499 5500 else if (consts_equal) 5501 /* MIN (X, 0) == 0 -> X >= 0 */ 5502 return fold_build2 (GE_EXPR, type, inner, comp_const); 5503 5504 else if (consts_lt) 5505 /* MIN (X, 0) == 5 -> false */ 5506 return omit_one_operand (type, integer_zero_node, inner); 5507 5508 else 5509 /* MIN (X, 0) == -1 -> X == -1 */ 5510 return fold_build2 (EQ_EXPR, type, inner, comp_const); 5511 5512 case GT_EXPR: 5513 if (op_code == MAX_EXPR && (consts_equal || consts_lt)) 5514 /* MAX (X, 0) > 0 -> X > 0 5515 MAX (X, 0) > 5 -> X > 5 */ 5516 return fold_build2 (GT_EXPR, type, inner, comp_const); 5517 5518 else if (op_code == MAX_EXPR) 5519 /* MAX (X, 0) > -1 -> true */ 5520 return omit_one_operand (type, integer_one_node, inner); 5521 5522 else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) 5523 /* MIN (X, 0) > 0 -> false 5524 MIN (X, 0) > 5 -> false */ 5525 return omit_one_operand (type, integer_zero_node, inner); 5526 5527 else 5528 /* MIN (X, 0) > -1 -> X > -1 */ 5529 return fold_build2 (GT_EXPR, type, inner, comp_const); 5530 5531 default: 5532 return NULL_TREE; 5533 } 5534} 5535 5536/* T is an integer expression that is being multiplied, divided, or taken a 5537 modulus (CODE says which and what kind of divide or modulus) by a 5538 constant C. See if we can eliminate that operation by folding it with 5539 other operations already in T. WIDE_TYPE, if non-null, is a type that 5540 should be used for the computation if wider than our type. 5541 5542 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return 5543 (X * 2) + (Y * 4). We must, however, be assured that either the original 5544 expression would not overflow or that overflow is undefined for the type 5545 in the language in question. 5546 5547 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either 5548 the machine has a multiply-accumulate insn or that this is part of an 5549 addressing calculation. 5550 5551 If we return a non-null expression, it is an equivalent form of the 5552 original computation, but need not be in the original type. 5553 5554 We set *STRICT_OVERFLOW_P to true if the return values depends on 5555 signed overflow being undefined. Otherwise we do not change 5556 *STRICT_OVERFLOW_P. */ 5557 5558static tree 5559extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type, 5560 bool *strict_overflow_p) 5561{ 5562 /* To avoid exponential search depth, refuse to allow recursion past 5563 three levels. 
Beyond that (1) it's highly unlikely that we'll find 5564 something interesting and (2) we've probably processed it before 5565 when we built the inner expression. */ 5566 5567 static int depth; 5568 tree ret; 5569 5570 if (depth > 3) 5571 return NULL; 5572 5573 depth++; 5574 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p); 5575 depth--; 5576 5577 return ret; 5578} 5579 5580static tree 5581extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, 5582 bool *strict_overflow_p) 5583{ 5584 tree type = TREE_TYPE (t); 5585 enum tree_code tcode = TREE_CODE (t); 5586 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type)) 5587 > GET_MODE_SIZE (TYPE_MODE (type))) 5588 ? wide_type : type); 5589 tree t1, t2; 5590 int same_p = tcode == code; 5591 tree op0 = NULL_TREE, op1 = NULL_TREE; 5592 bool sub_strict_overflow_p; 5593 5594 /* Don't deal with constants of zero here; they confuse the code below. */ 5595 if (integer_zerop (c)) 5596 return NULL_TREE; 5597 5598 if (TREE_CODE_CLASS (tcode) == tcc_unary) 5599 op0 = TREE_OPERAND (t, 0); 5600 5601 if (TREE_CODE_CLASS (tcode) == tcc_binary) 5602 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1); 5603 5604 /* Note that we need not handle conditional operations here since fold 5605 already handles those cases. So just do arithmetic here. */ 5606 switch (tcode) 5607 { 5608 case INTEGER_CST: 5609 /* For a constant, we can always simplify if we are a multiply 5610 or (for divide and modulus) if it is a multiple of our constant. */ 5611 if (code == MULT_EXPR 5612 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0))) 5613 return const_binop (code, fold_convert (ctype, t), 5614 fold_convert (ctype, c), 0); 5615 break; 5616 5617 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR: 5618 /* If op0 is an expression ... */ 5619 if ((COMPARISON_CLASS_P (op0) 5620 || UNARY_CLASS_P (op0) 5621 || BINARY_CLASS_P (op0) 5622 || EXPRESSION_CLASS_P (op0)) 5623 /* ... and is unsigned, and its type is smaller than ctype, 5624 then we cannot pass through as widening. */ 5625 && ((TYPE_UNSIGNED (TREE_TYPE (op0)) 5626 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE 5627 && TYPE_IS_SIZETYPE (TREE_TYPE (op0))) 5628 && (GET_MODE_SIZE (TYPE_MODE (ctype)) 5629 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))) 5630 /* ... or this is a truncation (t is narrower than op0), 5631 then we cannot pass through this narrowing. */ 5632 || (GET_MODE_SIZE (TYPE_MODE (type)) 5633 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))) 5634 /* ... or signedness changes for division or modulus, 5635 then we cannot pass through this conversion. */ 5636 || (code != MULT_EXPR 5637 && (TYPE_UNSIGNED (ctype) 5638 != TYPE_UNSIGNED (TREE_TYPE (op0)))))) 5639 break; 5640 5641 /* Pass the constant down and see if we can make a simplification. If 5642 we can, replace this expression with the inner simplification for 5643 possible later conversion to our or some other type. */ 5644 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 5645 && TREE_CODE (t2) == INTEGER_CST 5646 && ! TREE_CONSTANT_OVERFLOW (t2) 5647 && (0 != (t1 = extract_muldiv (op0, t2, code, 5648 code == MULT_EXPR 5649 ? ctype : NULL_TREE, 5650 strict_overflow_p)))) 5651 return t1; 5652 break; 5653 5654 case ABS_EXPR: 5655 /* If widening the type changes it from signed to unsigned, then we 5656 must avoid building ABS_EXPR itself as unsigned. 
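Instead we recurse with the signed variant of CTYPE and convert the result back to CTYPE afterwards.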
*/ 5657 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type)) 5658 { 5659 tree cstype = (*lang_hooks.types.signed_type) (ctype); 5660 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p)) 5661 != 0) 5662 { 5663 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1)); 5664 return fold_convert (ctype, t1); 5665 } 5666 break; 5667 } 5668 /* If the constant is negative, we cannot simplify this. */ 5669 if (tree_int_cst_sgn (c) == -1) 5670 break; 5671 /* FALLTHROUGH */ 5672 case NEGATE_EXPR: 5673 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) 5674 != 0) 5675 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); 5676 break; 5677 5678 case MIN_EXPR: case MAX_EXPR: 5679 /* If widening the type changes the signedness, then we can't perform 5680 this optimization as that changes the result. */ 5681 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type)) 5682 break; 5683 5684 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */ 5685 sub_strict_overflow_p = false; 5686 if ((t1 = extract_muldiv (op0, c, code, wide_type, 5687 &sub_strict_overflow_p)) != 0 5688 && (t2 = extract_muldiv (op1, c, code, wide_type, 5689 &sub_strict_overflow_p)) != 0) 5690 { 5691 if (tree_int_cst_sgn (c) < 0) 5692 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR); 5693 if (sub_strict_overflow_p) 5694 *strict_overflow_p = true; 5695 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5696 fold_convert (ctype, t2)); 5697 } 5698 break; 5699 5700 case LSHIFT_EXPR: case RSHIFT_EXPR: 5701 /* If the second operand is constant, this is a multiplication 5702 or floor division, by a power of two, so we can treat it that 5703 way unless the multiplier or divisor overflows. Signed 5704 left-shift overflow is implementation-defined rather than 5705 undefined in C90, so do not convert signed left shift into 5706 multiplication. */ 5707 if (TREE_CODE (op1) == INTEGER_CST 5708 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) 5709 /* const_binop may not detect overflow correctly, 5710 so check for it explicitly here. */ 5711 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1) 5712 && TREE_INT_CST_HIGH (op1) == 0 5713 && 0 != (t1 = fold_convert (ctype, 5714 const_binop (LSHIFT_EXPR, 5715 size_one_node, 5716 op1, 0))) 5717 && ! TREE_OVERFLOW (t1)) 5718 return extract_muldiv (build2 (tcode == LSHIFT_EXPR 5719 ? MULT_EXPR : FLOOR_DIV_EXPR, 5720 ctype, fold_convert (ctype, op0), t1), 5721 c, code, wide_type, strict_overflow_p); 5722 break; 5723 5724 case PLUS_EXPR: case MINUS_EXPR: 5725 /* See if we can eliminate the operation on both sides. If we can, we 5726 can return a new PLUS or MINUS. If we can't, the only remaining 5727 cases where we can do anything are if the second operand is a 5728 constant. */ 5729 sub_strict_overflow_p = false; 5730 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); 5731 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); 5732 if (t1 != 0 && t2 != 0 5733 && (code == MULT_EXPR 5734 /* If not multiplication, we can only do this if both operands 5735 are divisible by c. */ 5736 || (multiple_of_p (ctype, op0, c) 5737 && multiple_of_p (ctype, op1, c)))) 5738 { 5739 if (sub_strict_overflow_p) 5740 *strict_overflow_p = true; 5741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5742 fold_convert (ctype, t2)); 5743 } 5744 5745 /* If this was a subtraction, negate OP1 and set it to be an addition. 5746 This simplifies the logic below. 
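For instance, (X - 3) * 4 is handled from here on as (X + -3) * 4.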
*/ 5747 if (tcode == MINUS_EXPR) 5748 tcode = PLUS_EXPR, op1 = negate_expr (op1); 5749 5750 if (TREE_CODE (op1) != INTEGER_CST) 5751 break; 5752 5753 /* If either OP1 or C are negative, this optimization is not safe for 5754 some of the division and remainder types while for others we need 5755 to change the code. */ 5756 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0) 5757 { 5758 if (code == CEIL_DIV_EXPR) 5759 code = FLOOR_DIV_EXPR; 5760 else if (code == FLOOR_DIV_EXPR) 5761 code = CEIL_DIV_EXPR; 5762 else if (code != MULT_EXPR 5763 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR) 5764 break; 5765 } 5766 5767 /* If it's a multiply or a division/modulus operation of a multiple 5768 of our constant, do the operation and verify it doesn't overflow. */ 5769 if (code == MULT_EXPR 5770 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) 5771 { 5772 op1 = const_binop (code, fold_convert (ctype, op1), 5773 fold_convert (ctype, c), 0); 5774 /* We allow the constant to overflow with wrapping semantics. */ 5775 if (op1 == 0 5776 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype))) 5777 break; 5778 } 5779 else 5780 break; 5781 5782 /* If we have an unsigned type is not a sizetype, we cannot widen 5783 the operation since it will change the result if the original 5784 computation overflowed. */ 5785 if (TYPE_UNSIGNED (ctype) 5786 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)) 5787 && ctype != type) 5788 break; 5789 5790 /* If we were able to eliminate our operation from the first side, 5791 apply our operation to the second side and reform the PLUS. */ 5792 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR)) 5793 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1); 5794 5795 /* The last case is if we are a multiply. In that case, we can 5796 apply the distributive law to commute the multiply and addition 5797 if the multiplication of the constants doesn't overflow. */ 5798 if (code == MULT_EXPR) 5799 return fold_build2 (tcode, ctype, 5800 fold_build2 (code, ctype, 5801 fold_convert (ctype, op0), 5802 fold_convert (ctype, c)), 5803 op1); 5804 5805 break; 5806 5807 case MULT_EXPR: 5808 /* We have a special case here if we are doing something like 5809 (C * 8) % 4 since we know that's zero. */ 5810 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR 5811 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR) 5812 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 5813 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) 5814 return omit_one_operand (type, integer_zero_node, op0); 5815 5816 /* ... fall through ... */ 5817 5818 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: 5819 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: 5820 /* If we can extract our operation from the LHS, do so and return a 5821 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise, 5822 do something only if the second operand is a constant. */ 5823 if (same_p 5824 && (t1 = extract_muldiv (op0, c, code, wide_type, 5825 strict_overflow_p)) != 0) 5826 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5827 fold_convert (ctype, op1)); 5828 else if (tcode == MULT_EXPR && code == MULT_EXPR 5829 && (t1 = extract_muldiv (op1, c, code, wide_type, 5830 strict_overflow_p)) != 0) 5831 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 5832 fold_convert (ctype, t1)); 5833 else if (TREE_CODE (op1) != INTEGER_CST) 5834 return 0; 5835 5836 /* If these are the same operation types, we can associate them 5837 assuming no overflow. 
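For example, (X * 4) * 5 becomes X * 20, provided the constant product does not overflow.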
*/ 5838 if (tcode == code 5839 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1), 5840 fold_convert (ctype, c), 0)) 5841 && ! TREE_OVERFLOW (t1)) 5842 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1); 5843 5844 /* If these operations "cancel" each other, we have the main 5845 optimizations of this pass, which occur when either constant is a 5846 multiple of the other, in which case we replace this with either an 5847 operation or CODE or TCODE. 5848 5849 If we have an unsigned type that is not a sizetype, we cannot do 5850 this since it will change the result if the original computation 5851 overflowed. */ 5852 if ((TYPE_OVERFLOW_UNDEFINED (ctype) 5853 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))) 5854 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) 5855 || (tcode == MULT_EXPR 5856 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR 5857 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR))) 5858 { 5859 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) 5860 { 5861 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 5862 *strict_overflow_p = true; 5863 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 5864 fold_convert (ctype, 5865 const_binop (TRUNC_DIV_EXPR, 5866 op1, c, 0))); 5867 } 5868 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0))) 5869 { 5870 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 5871 *strict_overflow_p = true; 5872 return fold_build2 (code, ctype, fold_convert (ctype, op0), 5873 fold_convert (ctype, 5874 const_binop (TRUNC_DIV_EXPR, 5875 c, op1, 0))); 5876 } 5877 } 5878 break; 5879 5880 default: 5881 break; 5882 } 5883 5884 return 0; 5885} 5886 5887/* Return a node which has the indicated constant VALUE (either 0 or 5888 1), and is of the indicated TYPE. */ 5889 5890tree 5891constant_boolean_node (int value, tree type) 5892{ 5893 if (type == integer_type_node) 5894 return value ? integer_one_node : integer_zero_node; 5895 else if (type == boolean_type_node) 5896 return value ? boolean_true_node : boolean_false_node; 5897 else 5898 return build_int_cst (type, value); 5899} 5900 5901 5902/* Return true if expr looks like an ARRAY_REF and set base and 5903 offset to the appropriate trees. If there is no offset, 5904 offset is set to NULL_TREE. Base will be canonicalized to 5905 something you can get the element type from using 5906 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset 5907 in bytes to the base. */ 5908 5909static bool 5910extract_array_ref (tree expr, tree *base, tree *offset) 5911{ 5912 /* One canonical form is a PLUS_EXPR with the first 5913 argument being an ADDR_EXPR with a possible NOP_EXPR 5914 attached. */ 5915 if (TREE_CODE (expr) == PLUS_EXPR) 5916 { 5917 tree op0 = TREE_OPERAND (expr, 0); 5918 tree inner_base, dummy1; 5919 /* Strip NOP_EXPRs here because the C frontends and/or 5920 folders present us (int *)&x.a + 4B possibly. */ 5921 STRIP_NOPS (op0); 5922 if (extract_array_ref (op0, &inner_base, &dummy1)) 5923 { 5924 *base = inner_base; 5925 if (dummy1 == NULL_TREE) 5926 *offset = TREE_OPERAND (expr, 1); 5927 else 5928 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr), 5929 dummy1, TREE_OPERAND (expr, 1)); 5930 return true; 5931 } 5932 } 5933 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF, 5934 which we transform into an ADDR_EXPR with appropriate 5935 offset. For other arguments to the ADDR_EXPR we assume 5936 zero offset and as such do not care about the ADDR_EXPR 5937 type and strip possible nops from it. 
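In the ARRAY_REF case, &a[i], for example, yields base A and a byte offset of I times the array element size.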
*/ 5938 else if (TREE_CODE (expr) == ADDR_EXPR) 5939 { 5940 tree op0 = TREE_OPERAND (expr, 0); 5941 if (TREE_CODE (op0) == ARRAY_REF) 5942 { 5943 tree idx = TREE_OPERAND (op0, 1); 5944 *base = TREE_OPERAND (op0, 0); 5945 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx, 5946 array_ref_element_size (op0)); 5947 } 5948 else 5949 { 5950 /* Handle array-to-pointer decay as &a. */ 5951 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) 5952 *base = TREE_OPERAND (expr, 0); 5953 else 5954 *base = expr; 5955 *offset = NULL_TREE; 5956 } 5957 return true; 5958 } 5959 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */ 5960 else if (SSA_VAR_P (expr) 5961 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE) 5962 { 5963 *base = expr; 5964 *offset = NULL_TREE; 5965 return true; 5966 } 5967 5968 return false; 5969} 5970 5971 5972/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. 5973 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here 5974 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)' 5975 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the 5976 COND is the first argument to CODE; otherwise (as in the example 5977 given here), it is the second argument. TYPE is the type of the 5978 original expression. Return NULL_TREE if no simplification is 5979 possible. */ 5980 5981static tree 5982fold_binary_op_with_conditional_arg (enum tree_code code, 5983 tree type, tree op0, tree op1, 5984 tree cond, tree arg, int cond_first_p) 5985{ 5986 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1); 5987 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); 5988 tree test, true_value, false_value; 5989 tree lhs = NULL_TREE; 5990 tree rhs = NULL_TREE; 5991 5992 /* This transformation is only worthwhile if we don't have to wrap 5993 arg in a SAVE_EXPR, and the operation can be simplified on at least 5994 one of the branches once its pushed inside the COND_EXPR. */ 5995 if (!TREE_CONSTANT (arg)) 5996 return NULL_TREE; 5997 5998 if (TREE_CODE (cond) == COND_EXPR) 5999 { 6000 test = TREE_OPERAND (cond, 0); 6001 true_value = TREE_OPERAND (cond, 1); 6002 false_value = TREE_OPERAND (cond, 2); 6003 /* If this operand throws an expression, then it does not make 6004 sense to try to perform a logical or arithmetic operation 6005 involving it. */ 6006 if (VOID_TYPE_P (TREE_TYPE (true_value))) 6007 lhs = true_value; 6008 if (VOID_TYPE_P (TREE_TYPE (false_value))) 6009 rhs = false_value; 6010 } 6011 else 6012 { 6013 tree testtype = TREE_TYPE (cond); 6014 test = cond; 6015 true_value = constant_boolean_node (true, testtype); 6016 false_value = constant_boolean_node (false, testtype); 6017 } 6018 6019 arg = fold_convert (arg_type, arg); 6020 if (lhs == 0) 6021 { 6022 true_value = fold_convert (cond_type, true_value); 6023 if (cond_first_p) 6024 lhs = fold_build2 (code, type, true_value, arg); 6025 else 6026 lhs = fold_build2 (code, type, arg, true_value); 6027 } 6028 if (rhs == 0) 6029 { 6030 false_value = fold_convert (cond_type, false_value); 6031 if (cond_first_p) 6032 rhs = fold_build2 (code, type, false_value, arg); 6033 else 6034 rhs = fold_build2 (code, type, arg, false_value); 6035 } 6036 6037 test = fold_build3 (COND_EXPR, type, test, lhs, rhs); 6038 return fold_convert (type, test); 6039} 6040 6041 6042/* Subroutine of fold() that checks for the addition of +/- 0.0. 6043 6044 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type 6045 TYPE, X + ADDEND is the same as X. 
If NEGATE, return true if X - 6046 ADDEND is the same as X. 6047 6048 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero 6049 and finite. The problematic cases are when X is zero, and its mode 6050 has signed zeros. In the case of rounding towards -infinity, 6051 X - 0 is not the same as X because 0 - 0 is -0. In other rounding 6052 modes, X + 0 is not the same as X because -0 + 0 is 0. */ 6053 6054static bool 6055fold_real_zero_addition_p (tree type, tree addend, int negate) 6056{ 6057 if (!real_zerop (addend)) 6058 return false; 6059 6060 /* Don't allow the fold with -fsignaling-nans. */ 6061 if (HONOR_SNANS (TYPE_MODE (type))) 6062 return false; 6063 6064 /* Allow the fold if zeros aren't signed, or their sign isn't important. */ 6065 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 6066 return true; 6067 6068 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ 6069 if (TREE_CODE (addend) == REAL_CST 6070 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) 6071 negate = !negate; 6072 6073 /* The mode has signed zeros, and we have to honor their sign. 6074 In this situation, there is only one case we can return true for. 6075 X - 0 is the same as X unless rounding towards -infinity is 6076 supported. */ 6077 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)); 6078} 6079 6080/* Subroutine of fold() that checks comparisons of built-in math 6081 functions against real constants. 6082 6083 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison 6084 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE 6085 is the type of the result and ARG0 and ARG1 are the operands of the 6086 comparison. ARG1 must be a TREE_REAL_CST. 6087 6088 The function returns the constant folded tree if a simplification 6089 can be made, and NULL_TREE otherwise. */ 6090 6091static tree 6092fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, 6093 tree type, tree arg0, tree arg1) 6094{ 6095 REAL_VALUE_TYPE c; 6096 6097 if (BUILTIN_SQRT_P (fcode)) 6098 { 6099 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1)); 6100 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); 6101 6102 c = TREE_REAL_CST (arg1); 6103 if (REAL_VALUE_NEGATIVE (c)) 6104 { 6105 /* sqrt(x) < y is always false, if y is negative. */ 6106 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) 6107 return omit_one_operand (type, integer_zero_node, arg); 6108 6109 /* sqrt(x) > y is always true, if y is negative and we 6110 don't care about NaNs, i.e. negative values of x. */ 6111 if (code == NE_EXPR || !HONOR_NANS (mode)) 6112 return omit_one_operand (type, integer_one_node, arg); 6113 6114 /* sqrt(x) > y is the same as x >= 0, if y is negative. */ 6115 return fold_build2 (GE_EXPR, type, arg, 6116 build_real (TREE_TYPE (arg), dconst0)); 6117 } 6118 else if (code == GT_EXPR || code == GE_EXPR) 6119 { 6120 REAL_VALUE_TYPE c2; 6121 6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6123 real_convert (&c2, mode, &c2); 6124 6125 if (REAL_VALUE_ISINF (c2)) 6126 { 6127 /* sqrt(x) > y is x == +Inf, when y is very large. */ 6128 if (HONOR_INFINITIES (mode)) 6129 return fold_build2 (EQ_EXPR, type, arg, 6130 build_real (TREE_TYPE (arg), c2)); 6131 6132 /* sqrt(x) > y is always false, when y is very large 6133 and we don't care about infinities. */ 6134 return omit_one_operand (type, integer_zero_node, arg); 6135 } 6136 6137 /* sqrt(x) > c is the same as x > c*c. 
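CODE may also be GE_EXPR here, in which case sqrt(x) >= c is rewritten as x >= c*c in the same way.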
*/ 6138 return fold_build2 (code, type, arg, 6139 build_real (TREE_TYPE (arg), c2)); 6140 } 6141 else if (code == LT_EXPR || code == LE_EXPR) 6142 { 6143 REAL_VALUE_TYPE c2; 6144 6145 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6146 real_convert (&c2, mode, &c2); 6147 6148 if (REAL_VALUE_ISINF (c2)) 6149 { 6150 /* sqrt(x) < y is always true, when y is a very large 6151 value and we don't care about NaNs or Infinities. */ 6152 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode)) 6153 return omit_one_operand (type, integer_one_node, arg); 6154 6155 /* sqrt(x) < y is x != +Inf when y is very large and we 6156 don't care about NaNs. */ 6157 if (! HONOR_NANS (mode)) 6158 return fold_build2 (NE_EXPR, type, arg, 6159 build_real (TREE_TYPE (arg), c2)); 6160 6161 /* sqrt(x) < y is x >= 0 when y is very large and we 6162 don't care about Infinities. */ 6163 if (! HONOR_INFINITIES (mode)) 6164 return fold_build2 (GE_EXPR, type, arg, 6165 build_real (TREE_TYPE (arg), dconst0)); 6166 6167 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */ 6168 if (lang_hooks.decls.global_bindings_p () != 0 6169 || CONTAINS_PLACEHOLDER_P (arg)) 6170 return NULL_TREE; 6171 6172 arg = save_expr (arg); 6173 return fold_build2 (TRUTH_ANDIF_EXPR, type, 6174 fold_build2 (GE_EXPR, type, arg, 6175 build_real (TREE_TYPE (arg), 6176 dconst0)), 6177 fold_build2 (NE_EXPR, type, arg, 6178 build_real (TREE_TYPE (arg), 6179 c2))); 6180 } 6181 6182 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */ 6183 if (! HONOR_NANS (mode)) 6184 return fold_build2 (code, type, arg, 6185 build_real (TREE_TYPE (arg), c2)); 6186 6187 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */ 6188 if (lang_hooks.decls.global_bindings_p () == 0 6189 && ! CONTAINS_PLACEHOLDER_P (arg)) 6190 { 6191 arg = save_expr (arg); 6192 return fold_build2 (TRUTH_ANDIF_EXPR, type, 6193 fold_build2 (GE_EXPR, type, arg, 6194 build_real (TREE_TYPE (arg), 6195 dconst0)), 6196 fold_build2 (code, type, arg, 6197 build_real (TREE_TYPE (arg), 6198 c2))); 6199 } 6200 } 6201 } 6202 6203 return NULL_TREE; 6204} 6205 6206/* Subroutine of fold() that optimizes comparisons against Infinities, 6207 either +Inf or -Inf. 6208 6209 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, 6210 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 6211 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. 6212 6213 The function returns the constant folded tree if a simplification 6214 can be made, and NULL_TREE otherwise. */ 6215 6216static tree 6217fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) 6218{ 6219 enum machine_mode mode; 6220 REAL_VALUE_TYPE max; 6221 tree temp; 6222 bool neg; 6223 6224 mode = TYPE_MODE (TREE_TYPE (arg0)); 6225 6226 /* For negative infinity swap the sense of the comparison. */ 6227 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)); 6228 if (neg) 6229 code = swap_tree_comparison (code); 6230 6231 switch (code) 6232 { 6233 case GT_EXPR: 6234 /* x > +Inf is always false, if with ignore sNANs. */ 6235 if (HONOR_SNANS (mode)) 6236 return NULL_TREE; 6237 return omit_one_operand (type, integer_zero_node, arg0); 6238 6239 case LE_EXPR: 6240 /* x <= +Inf is always true, if we don't case about NaNs. */ 6241 if (! HONOR_NANS (mode)) 6242 return omit_one_operand (type, integer_one_node, arg0); 6243 6244 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */ 6245 if (lang_hooks.decls.global_bindings_p () == 0 6246 && ! 
CONTAINS_PLACEHOLDER_P (arg0)) 6247 { 6248 arg0 = save_expr (arg0); 6249 return fold_build2 (EQ_EXPR, type, arg0, arg0); 6250 } 6251 break; 6252 6253 case EQ_EXPR: 6254 case GE_EXPR: 6255 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */ 6256 real_maxval (&max, neg, mode); 6257 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type, 6258 arg0, build_real (TREE_TYPE (arg0), max)); 6259 6260 case LT_EXPR: 6261 /* x < +Inf is always equal to x <= DBL_MAX. */ 6262 real_maxval (&max, neg, mode); 6263 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type, 6264 arg0, build_real (TREE_TYPE (arg0), max)); 6265 6266 case NE_EXPR: 6267 /* x != +Inf is always equal to !(x > DBL_MAX). */ 6268 real_maxval (&max, neg, mode); 6269 if (! HONOR_NANS (mode)) 6270 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type, 6271 arg0, build_real (TREE_TYPE (arg0), max)); 6272 6273 /* The transformation below creates non-gimple code and thus is 6274 not appropriate if we are in gimple form. */ 6275 if (in_gimple_form) 6276 return NULL_TREE; 6277 6278 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type, 6279 arg0, build_real (TREE_TYPE (arg0), max)); 6280 return fold_build1 (TRUTH_NOT_EXPR, type, temp); 6281 6282 default: 6283 break; 6284 } 6285 6286 return NULL_TREE; 6287} 6288 6289/* Subroutine of fold() that optimizes comparisons of a division by 6290 a nonzero integer constant against an integer constant, i.e. 6291 X/C1 op C2. 6292 6293 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, 6294 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 6295 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. 6296 6297 The function returns the constant folded tree if a simplification 6298 can be made, and NULL_TREE otherwise. */ 6299 6300static tree 6301fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) 6302{ 6303 tree prod, tmp, hi, lo; 6304 tree arg00 = TREE_OPERAND (arg0, 0); 6305 tree arg01 = TREE_OPERAND (arg0, 1); 6306 unsigned HOST_WIDE_INT lpart; 6307 HOST_WIDE_INT hpart; 6308 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0)); 6309 bool neg_overflow; 6310 int overflow; 6311 6312 /* We have to do this the hard way to detect unsigned overflow. 6313 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */ 6314 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01), 6315 TREE_INT_CST_HIGH (arg01), 6316 TREE_INT_CST_LOW (arg1), 6317 TREE_INT_CST_HIGH (arg1), 6318 &lpart, &hpart, unsigned_p); 6319 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart); 6320 prod = force_fit_type (prod, -1, overflow, false); 6321 neg_overflow = false; 6322 6323 if (unsigned_p) 6324 { 6325 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0); 6326 lo = prod; 6327 6328 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). 
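Again we do it the hard way so that unsigned overflow is detected and recorded in HI.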
*/ 6329 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod), 6330 TREE_INT_CST_HIGH (prod), 6331 TREE_INT_CST_LOW (tmp), 6332 TREE_INT_CST_HIGH (tmp), 6333 &lpart, &hpart, unsigned_p); 6334 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart); 6335 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod), 6336 TREE_CONSTANT_OVERFLOW (prod)); 6337 } 6338 else if (tree_int_cst_sgn (arg01) >= 0) 6339 { 6340 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0); 6341 switch (tree_int_cst_sgn (arg1)) 6342 { 6343 case -1: 6344 neg_overflow = true; 6345 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0); 6346 hi = prod; 6347 break; 6348 6349 case 0: 6350 lo = fold_negate_const (tmp, TREE_TYPE (arg0)); 6351 hi = tmp; 6352 break; 6353 6354 case 1: 6355 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0); 6356 lo = prod; 6357 break; 6358 6359 default: 6360 gcc_unreachable (); 6361 } 6362 } 6363 else 6364 { 6365 /* A negative divisor reverses the relational operators. */ 6366 code = swap_tree_comparison (code); 6367 6368 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0); 6369 switch (tree_int_cst_sgn (arg1)) 6370 { 6371 case -1: 6372 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0); 6373 lo = prod; 6374 break; 6375 6376 case 0: 6377 hi = fold_negate_const (tmp, TREE_TYPE (arg0)); 6378 lo = tmp; 6379 break; 6380 6381 case 1: 6382 neg_overflow = true; 6383 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0); 6384 hi = prod; 6385 break; 6386 6387 default: 6388 gcc_unreachable (); 6389 } 6390 } 6391 6392 switch (code) 6393 { 6394 case EQ_EXPR: 6395 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6396 return omit_one_operand (type, integer_zero_node, arg00); 6397 if (TREE_OVERFLOW (hi)) 6398 return fold_build2 (GE_EXPR, type, arg00, lo); 6399 if (TREE_OVERFLOW (lo)) 6400 return fold_build2 (LE_EXPR, type, arg00, hi); 6401 return build_range_check (type, arg00, 1, lo, hi); 6402 6403 case NE_EXPR: 6404 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6405 return omit_one_operand (type, integer_one_node, arg00); 6406 if (TREE_OVERFLOW (hi)) 6407 return fold_build2 (LT_EXPR, type, arg00, lo); 6408 if (TREE_OVERFLOW (lo)) 6409 return fold_build2 (GT_EXPR, type, arg00, hi); 6410 return build_range_check (type, arg00, 0, lo, hi); 6411 6412 case LT_EXPR: 6413 if (TREE_OVERFLOW (lo)) 6414 { 6415 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6416 return omit_one_operand (type, tmp, arg00); 6417 } 6418 return fold_build2 (LT_EXPR, type, arg00, lo); 6419 6420 case LE_EXPR: 6421 if (TREE_OVERFLOW (hi)) 6422 { 6423 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6424 return omit_one_operand (type, tmp, arg00); 6425 } 6426 return fold_build2 (LE_EXPR, type, arg00, hi); 6427 6428 case GT_EXPR: 6429 if (TREE_OVERFLOW (hi)) 6430 { 6431 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6432 return omit_one_operand (type, tmp, arg00); 6433 } 6434 return fold_build2 (GT_EXPR, type, arg00, hi); 6435 6436 case GE_EXPR: 6437 if (TREE_OVERFLOW (lo)) 6438 { 6439 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6440 return omit_one_operand (type, tmp, arg00); 6441 } 6442 return fold_build2 (GE_EXPR, type, arg00, lo); 6443 6444 default: 6445 break; 6446 } 6447 6448 return NULL_TREE; 6449} 6450 6451 6452/* If CODE with arguments ARG0 and ARG1 represents a single bit 6453 equality/inequality test, then return a simplified form of the test 6454 using a sign testing. Otherwise return NULL. TYPE is the desired 6455 result type. 
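For example, (A & 0x80000000) != 0, with A a 32-bit unsigned value, becomes A < 0 once A is converted to the corresponding signed type.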
*/ 6456 6457static tree 6458fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1, 6459 tree result_type) 6460{ 6461 /* If this is testing a single bit, we can optimize the test. */ 6462 if ((code == NE_EXPR || code == EQ_EXPR) 6463 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6464 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6465 { 6466 /* If we have (A & C) != 0 where C is the sign bit of A, convert 6467 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */ 6468 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 6469 6470 if (arg00 != NULL_TREE 6471 /* This is only a win if casting to a signed type is cheap, 6472 i.e. when arg00's type is not a partial mode. */ 6473 && TYPE_PRECISION (TREE_TYPE (arg00)) 6474 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00)))) 6475 { 6476 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00)); 6477 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, 6478 result_type, fold_convert (stype, arg00), 6479 build_int_cst (stype, 0)); 6480 } 6481 } 6482 6483 return NULL_TREE; 6484} 6485 6486/* If CODE with arguments ARG0 and ARG1 represents a single bit 6487 equality/inequality test, then return a simplified form of 6488 the test using shifts and logical operations. Otherwise return 6489 NULL. TYPE is the desired result type. */ 6490 6491tree 6492fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, 6493 tree result_type) 6494{ 6495 /* If this is testing a single bit, we can optimize the test. */ 6496 if ((code == NE_EXPR || code == EQ_EXPR) 6497 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6498 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6499 { 6500 tree inner = TREE_OPERAND (arg0, 0); 6501 tree type = TREE_TYPE (arg0); 6502 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); 6503 enum machine_mode operand_mode = TYPE_MODE (type); 6504 int ops_unsigned; 6505 tree signed_type, unsigned_type, intermediate_type; 6506 tree tem; 6507 6508 /* First, see if we can fold the single bit test into a sign-bit 6509 test. */ 6510 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, 6511 result_type); 6512 if (tem) 6513 return tem; 6514 6515 /* Otherwise we have (A & C) != 0 where C is a single bit, 6516 convert that into ((A >> C2) & 1). Where C2 = log2(C). 6517 Similarly for (A & C) == 0. */ 6518 6519 /* If INNER is a right shift of a constant and it plus BITNUM does 6520 not overflow, adjust BITNUM and INNER. */ 6521 if (TREE_CODE (inner) == RSHIFT_EXPR 6522 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST 6523 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 6524 && bitnum < TYPE_PRECISION (type) 6525 && 0 > compare_tree_int (TREE_OPERAND (inner, 1), 6526 bitnum - TYPE_PRECISION (type))) 6527 { 6528 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); 6529 inner = TREE_OPERAND (inner, 0); 6530 } 6531 6532 /* If we are going to be able to omit the AND below, we must do our 6533 operations as unsigned. If we must use the AND, we have a choice. 6534 Normally unsigned is faster, but for some machines signed is. */ 6535#ifdef LOAD_EXTEND_OP 6536 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND 6537 && !flag_syntax_only) ? 0 : 1; 6538#else 6539 ops_unsigned = 1; 6540#endif 6541 6542 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0); 6543 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1); 6544 intermediate_type = ops_unsigned ? 
unsigned_type : signed_type; 6545 inner = fold_convert (intermediate_type, inner); 6546 6547 if (bitnum != 0) 6548 inner = build2 (RSHIFT_EXPR, intermediate_type, 6549 inner, size_int (bitnum)); 6550 6551 if (code == EQ_EXPR) 6552 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, 6553 inner, integer_one_node); 6554 6555 /* Put the AND last so it can combine with more things. */ 6556 inner = build2 (BIT_AND_EXPR, intermediate_type, 6557 inner, integer_one_node); 6558 6559 /* Make sure to return the proper type. */ 6560 inner = fold_convert (result_type, inner); 6561 6562 return inner; 6563 } 6564 return NULL_TREE; 6565} 6566 6567/* Check whether we are allowed to reorder operands arg0 and arg1, 6568 such that the evaluation of arg1 occurs before arg0. */ 6569 6570static bool 6571reorder_operands_p (tree arg0, tree arg1) 6572{ 6573 if (! flag_evaluation_order) 6574 return true; 6575 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1)) 6576 return true; 6577 return ! TREE_SIDE_EFFECTS (arg0) 6578 && ! TREE_SIDE_EFFECTS (arg1); 6579} 6580 6581/* Test whether it is preferable two swap two operands, ARG0 and 6582 ARG1, for example because ARG0 is an integer constant and ARG1 6583 isn't. If REORDER is true, only recommend swapping if we can 6584 evaluate the operands in reverse order. */ 6585 6586bool 6587tree_swap_operands_p (tree arg0, tree arg1, bool reorder) 6588{ 6589 STRIP_SIGN_NOPS (arg0); 6590 STRIP_SIGN_NOPS (arg1); 6591 6592 if (TREE_CODE (arg1) == INTEGER_CST) 6593 return 0; 6594 if (TREE_CODE (arg0) == INTEGER_CST) 6595 return 1; 6596 6597 if (TREE_CODE (arg1) == REAL_CST) 6598 return 0; 6599 if (TREE_CODE (arg0) == REAL_CST) 6600 return 1; 6601 6602 if (TREE_CODE (arg1) == COMPLEX_CST) 6603 return 0; 6604 if (TREE_CODE (arg0) == COMPLEX_CST) 6605 return 1; 6606 6607 if (TREE_CONSTANT (arg1)) 6608 return 0; 6609 if (TREE_CONSTANT (arg0)) 6610 return 1; 6611 6612 if (optimize_size) 6613 return 0; 6614 6615 if (reorder && flag_evaluation_order 6616 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) 6617 return 0; 6618 6619 if (DECL_P (arg1)) 6620 return 0; 6621 if (DECL_P (arg0)) 6622 return 1; 6623 6624 /* It is preferable to swap two SSA_NAME to ensure a canonical form 6625 for commutative and comparison operators. Ensuring a canonical 6626 form allows the optimizers to find additional redundancies without 6627 having to explicitly check for both orderings. */ 6628 if (TREE_CODE (arg0) == SSA_NAME 6629 && TREE_CODE (arg1) == SSA_NAME 6630 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) 6631 return 1; 6632 6633 return 0; 6634} 6635 6636/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where 6637 ARG0 is extended to a wider type. */ 6638 6639static tree 6640fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) 6641{ 6642 tree arg0_unw = get_unwidened (arg0, NULL_TREE); 6643 tree arg1_unw; 6644 tree shorter_type, outer_type; 6645 tree min, max; 6646 bool above, below; 6647 6648 if (arg0_unw == arg0) 6649 return NULL_TREE; 6650 shorter_type = TREE_TYPE (arg0_unw); 6651 6652#ifdef HAVE_canonicalize_funcptr_for_compare 6653 /* Disable this optimization if we're casting a function pointer 6654 type on targets that require function pointer canonicalization. 
*/ 6655 if (HAVE_canonicalize_funcptr_for_compare 6656 && TREE_CODE (shorter_type) == POINTER_TYPE 6657 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE) 6658 return NULL_TREE; 6659#endif 6660 6661 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type)) 6662 return NULL_TREE; 6663 6664 arg1_unw = get_unwidened (arg1, NULL_TREE); 6665 6666 /* If possible, express the comparison in the shorter mode. */ 6667 if ((code == EQ_EXPR || code == NE_EXPR 6668 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type)) 6669 && (TREE_TYPE (arg1_unw) == shorter_type 6670 || (TYPE_PRECISION (shorter_type) 6671 >= TYPE_PRECISION (TREE_TYPE (arg1_unw))) 6672 || (TREE_CODE (arg1_unw) == INTEGER_CST 6673 && (TREE_CODE (shorter_type) == INTEGER_TYPE 6674 || TREE_CODE (shorter_type) == BOOLEAN_TYPE) 6675 && int_fits_type_p (arg1_unw, shorter_type)))) 6676 return fold_build2 (code, type, arg0_unw, 6677 fold_convert (shorter_type, arg1_unw)); 6678 6679 if (TREE_CODE (arg1_unw) != INTEGER_CST 6680 || TREE_CODE (shorter_type) != INTEGER_TYPE 6681 || !int_fits_type_p (arg1_unw, shorter_type)) 6682 return NULL_TREE; 6683 6684 /* If we are comparing with the integer that does not fit into the range 6685 of the shorter type, the result is known. */ 6686 outer_type = TREE_TYPE (arg1_unw); 6687 min = lower_bound_in_type (outer_type, shorter_type); 6688 max = upper_bound_in_type (outer_type, shorter_type); 6689 6690 above = integer_nonzerop (fold_relational_const (LT_EXPR, type, 6691 max, arg1_unw)); 6692 below = integer_nonzerop (fold_relational_const (LT_EXPR, type, 6693 arg1_unw, min)); 6694 6695 switch (code) 6696 { 6697 case EQ_EXPR: 6698 if (above || below) 6699 return omit_one_operand (type, integer_zero_node, arg0); 6700 break; 6701 6702 case NE_EXPR: 6703 if (above || below) 6704 return omit_one_operand (type, integer_one_node, arg0); 6705 break; 6706 6707 case LT_EXPR: 6708 case LE_EXPR: 6709 if (above) 6710 return omit_one_operand (type, integer_one_node, arg0); 6711 else if (below) 6712 return omit_one_operand (type, integer_zero_node, arg0); 6713 6714 case GT_EXPR: 6715 case GE_EXPR: 6716 if (above) 6717 return omit_one_operand (type, integer_zero_node, arg0); 6718 else if (below) 6719 return omit_one_operand (type, integer_one_node, arg0); 6720 6721 default: 6722 break; 6723 } 6724 6725 return NULL_TREE; 6726} 6727 6728/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for 6729 ARG0 just the signedness is changed. */ 6730 6731static tree 6732fold_sign_changed_comparison (enum tree_code code, tree type, 6733 tree arg0, tree arg1) 6734{ 6735 tree arg0_inner, tmp; 6736 tree inner_type, outer_type; 6737 6738 if (TREE_CODE (arg0) != NOP_EXPR 6739 && TREE_CODE (arg0) != CONVERT_EXPR) 6740 return NULL_TREE; 6741 6742 outer_type = TREE_TYPE (arg0); 6743 arg0_inner = TREE_OPERAND (arg0, 0); 6744 inner_type = TREE_TYPE (arg0_inner); 6745 6746#ifdef HAVE_canonicalize_funcptr_for_compare 6747 /* Disable this optimization if we're casting a function pointer 6748 type on targets that require function pointer canonicalization. 
*/ 6749 if (HAVE_canonicalize_funcptr_for_compare 6750 && TREE_CODE (inner_type) == POINTER_TYPE 6751 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE) 6752 return NULL_TREE; 6753#endif 6754 6755 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) 6756 return NULL_TREE; 6757 6758 if (TREE_CODE (arg1) != INTEGER_CST 6759 && !((TREE_CODE (arg1) == NOP_EXPR 6760 || TREE_CODE (arg1) == CONVERT_EXPR) 6761 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)) 6762 return NULL_TREE; 6763 6764 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type) 6765 && code != NE_EXPR 6766 && code != EQ_EXPR) 6767 return NULL_TREE; 6768 6769 if (TREE_CODE (arg1) == INTEGER_CST) 6770 { 6771 tmp = build_int_cst_wide (inner_type, 6772 TREE_INT_CST_LOW (arg1), 6773 TREE_INT_CST_HIGH (arg1)); 6774 arg1 = force_fit_type (tmp, 0, 6775 TREE_OVERFLOW (arg1), 6776 TREE_CONSTANT_OVERFLOW (arg1)); 6777 } 6778 else 6779 arg1 = fold_convert (inner_type, arg1); 6780 6781 return fold_build2 (code, type, arg0_inner, arg1); 6782} 6783 6784/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is 6785 step of the array. Reconstructs s and delta in the case of s * delta 6786 being an integer constant (and thus already folded). 6787 ADDR is the address. MULT is the multiplicative expression. 6788 If the function succeeds, the new address expression is returned. Otherwise 6789 NULL_TREE is returned. */ 6790 6791static tree 6792try_move_mult_to_index (enum tree_code code, tree addr, tree op1) 6793{ 6794 tree s, delta, step; 6795 tree ref = TREE_OPERAND (addr, 0), pref; 6796 tree ret, pos; 6797 tree itype; 6798 6799 /* Canonicalize op1 into a possibly non-constant delta 6800 and an INTEGER_CST s. */ 6801 if (TREE_CODE (op1) == MULT_EXPR) 6802 { 6803 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1); 6804 6805 STRIP_NOPS (arg0); 6806 STRIP_NOPS (arg1); 6807 6808 if (TREE_CODE (arg0) == INTEGER_CST) 6809 { 6810 s = arg0; 6811 delta = arg1; 6812 } 6813 else if (TREE_CODE (arg1) == INTEGER_CST) 6814 { 6815 s = arg1; 6816 delta = arg0; 6817 } 6818 else 6819 return NULL_TREE; 6820 } 6821 else if (TREE_CODE (op1) == INTEGER_CST) 6822 { 6823 delta = op1; 6824 s = NULL_TREE; 6825 } 6826 else 6827 { 6828 /* Simulate we are delta * 1. */ 6829 delta = op1; 6830 s = integer_one_node; 6831 } 6832 6833 for (;; ref = TREE_OPERAND (ref, 0)) 6834 { 6835 if (TREE_CODE (ref) == ARRAY_REF) 6836 { 6837 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); 6838 if (! itype) 6839 continue; 6840 6841 step = array_ref_element_size (ref); 6842 if (TREE_CODE (step) != INTEGER_CST) 6843 continue; 6844 6845 if (s) 6846 { 6847 if (! tree_int_cst_equal (step, s)) 6848 continue; 6849 } 6850 else 6851 { 6852 /* Try if delta is a multiple of step. */ 6853 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step); 6854 if (! tmp) 6855 continue; 6856 delta = tmp; 6857 } 6858 6859 break; 6860 } 6861 6862 if (!handled_component_p (ref)) 6863 return NULL_TREE; 6864 } 6865 6866 /* We found the suitable array reference. So copy everything up to it, 6867 and replace the index. 
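For instance, &a[i] + 4 * s, with S the element size of A, yields &a[i + 4].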
*/ 6868 6869 pref = TREE_OPERAND (addr, 0); 6870 ret = copy_node (pref); 6871 pos = ret; 6872 6873 while (pref != ref) 6874 { 6875 pref = TREE_OPERAND (pref, 0); 6876 TREE_OPERAND (pos, 0) = copy_node (pref); 6877 pos = TREE_OPERAND (pos, 0); 6878 } 6879 6880 TREE_OPERAND (pos, 1) = fold_build2 (code, itype, 6881 fold_convert (itype, 6882 TREE_OPERAND (pos, 1)), 6883 fold_convert (itype, delta)); 6884 6885 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret); 6886} 6887 6888 6889/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y 6890 means A >= Y && A != MAX, but in this case we know that 6891 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */ 6892 6893static tree 6894fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound) 6895{ 6896 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; 6897 6898 if (TREE_CODE (bound) == LT_EXPR) 6899 a = TREE_OPERAND (bound, 0); 6900 else if (TREE_CODE (bound) == GT_EXPR) 6901 a = TREE_OPERAND (bound, 1); 6902 else 6903 return NULL_TREE; 6904 6905 typea = TREE_TYPE (a); 6906 if (!INTEGRAL_TYPE_P (typea) 6907 && !POINTER_TYPE_P (typea)) 6908 return NULL_TREE; 6909 6910 if (TREE_CODE (ineq) == LT_EXPR) 6911 { 6912 a1 = TREE_OPERAND (ineq, 1); 6913 y = TREE_OPERAND (ineq, 0); 6914 } 6915 else if (TREE_CODE (ineq) == GT_EXPR) 6916 { 6917 a1 = TREE_OPERAND (ineq, 0); 6918 y = TREE_OPERAND (ineq, 1); 6919 } 6920 else 6921 return NULL_TREE; 6922 6923 if (TREE_TYPE (a1) != typea) 6924 return NULL_TREE; 6925 6926 diff = fold_build2 (MINUS_EXPR, typea, a1, a); 6927 if (!integer_onep (diff)) 6928 return NULL_TREE; 6929 6930 return fold_build2 (GE_EXPR, type, a, y); 6931} 6932 6933/* Fold a sum or difference of at least one multiplication. 6934 Returns the folded tree or NULL if no simplification could be made. */ 6935 6936static tree 6937fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1) 6938{ 6939 tree arg00, arg01, arg10, arg11; 6940 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same; 6941 6942 /* (A * C) +- (B * C) -> (A+-B) * C. 6943 (A * C) +- A -> A * (C+-1). 6944 We are most concerned about the case where C is a constant, 6945 but other combinations show up during loop reduction. Since 6946 it is not difficult, try all four possibilities. */ 6947 6948 if (TREE_CODE (arg0) == MULT_EXPR) 6949 { 6950 arg00 = TREE_OPERAND (arg0, 0); 6951 arg01 = TREE_OPERAND (arg0, 1); 6952 } 6953 else 6954 { 6955 arg00 = arg0; 6956 arg01 = build_one_cst (type); 6957 } 6958 if (TREE_CODE (arg1) == MULT_EXPR) 6959 { 6960 arg10 = TREE_OPERAND (arg1, 0); 6961 arg11 = TREE_OPERAND (arg1, 1); 6962 } 6963 else 6964 { 6965 arg10 = arg1; 6966 arg11 = build_one_cst (type); 6967 } 6968 same = NULL_TREE; 6969 6970 if (operand_equal_p (arg01, arg11, 0)) 6971 same = arg01, alt0 = arg00, alt1 = arg10; 6972 else if (operand_equal_p (arg00, arg10, 0)) 6973 same = arg00, alt0 = arg01, alt1 = arg11; 6974 else if (operand_equal_p (arg00, arg11, 0)) 6975 same = arg00, alt0 = arg01, alt1 = arg10; 6976 else if (operand_equal_p (arg01, arg10, 0)) 6977 same = arg01, alt0 = arg00, alt1 = arg11; 6978 6979 /* No identical multiplicands; see if we can find a common 6980 power-of-two factor in non-power-of-two multiplies. This 6981 can help in multi-dimensional array access. */ 6982 else if (host_integerp (arg01, 0) 6983 && host_integerp (arg11, 0)) 6984 { 6985 HOST_WIDE_INT int01, int11, tmp; 6986 bool swap = false; 6987 tree maybe_same; 6988 int01 = TREE_INT_CST_LOW (arg01); 6989 int11 = TREE_INT_CST_LOW (arg11); 6990 6991 /* Move min of absolute values to int11. 
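That way only INT11 needs to be tested as the power-of-two factor below.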
*/ 6992 if ((int01 >= 0 ? int01 : -int01) 6993 < (int11 >= 0 ? int11 : -int11)) 6994 { 6995 tmp = int01, int01 = int11, int11 = tmp; 6996 alt0 = arg00, arg00 = arg10, arg10 = alt0; 6997 maybe_same = arg01; 6998 swap = true; 6999 } 7000 else 7001 maybe_same = arg11; 7002 7003 if (exact_log2 (int11) > 0 && int01 % int11 == 0) 7004 { 7005 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00, 7006 build_int_cst (TREE_TYPE (arg00), 7007 int01 / int11)); 7008 alt1 = arg10; 7009 same = maybe_same; 7010 if (swap) 7011 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same; 7012 } 7013 } 7014 7015 if (same) 7016 return fold_build2 (MULT_EXPR, type, 7017 fold_build2 (code, type, 7018 fold_convert (type, alt0), 7019 fold_convert (type, alt1)), 7020 fold_convert (type, same)); 7021 7022 return NULL_TREE; 7023} 7024 7025/* Subroutine of native_encode_expr. Encode the INTEGER_CST 7026 specified by EXPR into the buffer PTR of length LEN bytes. 7027 Return the number of bytes placed in the buffer, or zero 7028 upon failure. */ 7029 7030static int 7031native_encode_int (tree expr, unsigned char *ptr, int len) 7032{ 7033 tree type = TREE_TYPE (expr); 7034 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7035 int byte, offset, word, words; 7036 unsigned char value; 7037 7038 if (total_bytes > len) 7039 return 0; 7040 words = total_bytes / UNITS_PER_WORD; 7041 7042 for (byte = 0; byte < total_bytes; byte++) 7043 { 7044 int bitpos = byte * BITS_PER_UNIT; 7045 if (bitpos < HOST_BITS_PER_WIDE_INT) 7046 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos); 7047 else 7048 value = (unsigned char) (TREE_INT_CST_HIGH (expr) 7049 >> (bitpos - HOST_BITS_PER_WIDE_INT)); 7050 7051 if (total_bytes > UNITS_PER_WORD) 7052 { 7053 word = byte / UNITS_PER_WORD; 7054 if (WORDS_BIG_ENDIAN) 7055 word = (words - 1) - word; 7056 offset = word * UNITS_PER_WORD; 7057 if (BYTES_BIG_ENDIAN) 7058 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7059 else 7060 offset += byte % UNITS_PER_WORD; 7061 } 7062 else 7063 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7064 ptr[offset] = value; 7065 } 7066 return total_bytes; 7067} 7068 7069 7070/* Subroutine of native_encode_expr. Encode the REAL_CST 7071 specified by EXPR into the buffer PTR of length LEN bytes. 7072 Return the number of bytes placed in the buffer, or zero 7073 upon failure. */ 7074 7075static int 7076native_encode_real (tree expr, unsigned char *ptr, int len) 7077{ 7078 tree type = TREE_TYPE (expr); 7079 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7080 int byte, offset, word, words, bitpos; 7081 unsigned char value; 7082 7083 /* There are always 32 bits in each long, no matter the size of 7084 the hosts long. We handle floating point representations with 7085 up to 192 bits. */ 7086 long tmp[6]; 7087 7088 if (total_bytes > len) 7089 return 0; 7090 words = 32 / UNITS_PER_WORD; 7091 7092 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type)); 7093 7094 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7095 bitpos += BITS_PER_UNIT) 7096 { 7097 byte = (bitpos / BITS_PER_UNIT) & 3; 7098 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31)); 7099 7100 if (UNITS_PER_WORD < 4) 7101 { 7102 word = byte / UNITS_PER_WORD; 7103 if (WORDS_BIG_ENDIAN) 7104 word = (words - 1) - word; 7105 offset = word * UNITS_PER_WORD; 7106 if (BYTES_BIG_ENDIAN) 7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7108 else 7109 offset += byte % UNITS_PER_WORD; 7110 } 7111 else 7112 offset = BYTES_BIG_ENDIAN ? 
3 - byte : byte; 7113 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value; 7114 } 7115 return total_bytes; 7116} 7117 7118/* Subroutine of native_encode_expr. Encode the COMPLEX_CST 7119 specified by EXPR into the buffer PTR of length LEN bytes. 7120 Return the number of bytes placed in the buffer, or zero 7121 upon failure. */ 7122 7123static int 7124native_encode_complex (tree expr, unsigned char *ptr, int len) 7125{ 7126 int rsize, isize; 7127 tree part; 7128 7129 part = TREE_REALPART (expr); 7130 rsize = native_encode_expr (part, ptr, len); 7131 if (rsize == 0) 7132 return 0; 7133 part = TREE_IMAGPART (expr); 7134 isize = native_encode_expr (part, ptr+rsize, len-rsize); 7135 if (isize != rsize) 7136 return 0; 7137 return rsize + isize; 7138} 7139 7140 7141/* Subroutine of native_encode_expr. Encode the VECTOR_CST 7142 specified by EXPR into the buffer PTR of length LEN bytes. 7143 Return the number of bytes placed in the buffer, or zero 7144 upon failure. */ 7145 7146static int 7147native_encode_vector (tree expr, unsigned char *ptr, int len) 7148{ 7149 int i, size, offset, count; 7150 tree itype, elem, elements; 7151 7152 offset = 0; 7153 elements = TREE_VECTOR_CST_ELTS (expr); 7154 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)); 7155 itype = TREE_TYPE (TREE_TYPE (expr)); 7156 size = GET_MODE_SIZE (TYPE_MODE (itype)); 7157 for (i = 0; i < count; i++) 7158 { 7159 if (elements) 7160 { 7161 elem = TREE_VALUE (elements); 7162 elements = TREE_CHAIN (elements); 7163 } 7164 else 7165 elem = NULL_TREE; 7166 7167 if (elem) 7168 { 7169 if (native_encode_expr (elem, ptr+offset, len-offset) != size) 7170 return 0; 7171 } 7172 else 7173 { 7174 if (offset + size > len) 7175 return 0; 7176 memset (ptr+offset, 0, size); 7177 } 7178 offset += size; 7179 } 7180 return offset; 7181} 7182 7183 7184/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, 7185 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the 7186 buffer PTR of length LEN bytes. Return the number of bytes 7187 placed in the buffer, or zero upon failure. */ 7188 7189static int 7190native_encode_expr (tree expr, unsigned char *ptr, int len) 7191{ 7192 switch (TREE_CODE (expr)) 7193 { 7194 case INTEGER_CST: 7195 return native_encode_int (expr, ptr, len); 7196 7197 case REAL_CST: 7198 return native_encode_real (expr, ptr, len); 7199 7200 case COMPLEX_CST: 7201 return native_encode_complex (expr, ptr, len); 7202 7203 case VECTOR_CST: 7204 return native_encode_vector (expr, ptr, len); 7205 7206 default: 7207 return 0; 7208 } 7209} 7210 7211 7212/* Subroutine of native_interpret_expr. Interpret the contents of 7213 the buffer PTR of length LEN as an INTEGER_CST of type TYPE. 7214 If the buffer cannot be interpreted, return NULL_TREE. 
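This is the inverse of native_encode_int above and uses the same byte ordering, so encoding and then interpreting a constant round-trips its value.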
*/ 7215 7216static tree 7217native_interpret_int (tree type, unsigned char *ptr, int len) 7218{ 7219 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7220 int byte, offset, word, words; 7221 unsigned char value; 7222 unsigned int HOST_WIDE_INT lo = 0; 7223 HOST_WIDE_INT hi = 0; 7224 7225 if (total_bytes > len) 7226 return NULL_TREE; 7227 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) 7228 return NULL_TREE; 7229 words = total_bytes / UNITS_PER_WORD; 7230 7231 for (byte = 0; byte < total_bytes; byte++) 7232 { 7233 int bitpos = byte * BITS_PER_UNIT; 7234 if (total_bytes > UNITS_PER_WORD) 7235 { 7236 word = byte / UNITS_PER_WORD; 7237 if (WORDS_BIG_ENDIAN) 7238 word = (words - 1) - word; 7239 offset = word * UNITS_PER_WORD; 7240 if (BYTES_BIG_ENDIAN) 7241 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7242 else 7243 offset += byte % UNITS_PER_WORD; 7244 } 7245 else 7246 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7247 value = ptr[offset]; 7248 7249 if (bitpos < HOST_BITS_PER_WIDE_INT) 7250 lo |= (unsigned HOST_WIDE_INT) value << bitpos; 7251 else 7252 hi |= (unsigned HOST_WIDE_INT) value 7253 << (bitpos - HOST_BITS_PER_WIDE_INT); 7254 } 7255 7256 return force_fit_type (build_int_cst_wide (type, lo, hi), 7257 0, false, false); 7258} 7259 7260 7261/* Subroutine of native_interpret_expr. Interpret the contents of 7262 the buffer PTR of length LEN as a REAL_CST of type TYPE. 7263 If the buffer cannot be interpreted, return NULL_TREE. */ 7264 7265static tree 7266native_interpret_real (tree type, unsigned char *ptr, int len) 7267{ 7268 enum machine_mode mode = TYPE_MODE (type); 7269 int total_bytes = GET_MODE_SIZE (mode); 7270 int byte, offset, word, words, bitpos; 7271 unsigned char value; 7272 /* There are always 32 bits in each long, no matter the size of 7273 the hosts long. We handle floating point representations with 7274 up to 192 bits. */ 7275 REAL_VALUE_TYPE r; 7276 long tmp[6]; 7277 7278 total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7279 if (total_bytes > len || total_bytes > 24) 7280 return NULL_TREE; 7281 words = 32 / UNITS_PER_WORD; 7282 7283 memset (tmp, 0, sizeof (tmp)); 7284 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7285 bitpos += BITS_PER_UNIT) 7286 { 7287 byte = (bitpos / BITS_PER_UNIT) & 3; 7288 if (UNITS_PER_WORD < 4) 7289 { 7290 word = byte / UNITS_PER_WORD; 7291 if (WORDS_BIG_ENDIAN) 7292 word = (words - 1) - word; 7293 offset = word * UNITS_PER_WORD; 7294 if (BYTES_BIG_ENDIAN) 7295 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7296 else 7297 offset += byte % UNITS_PER_WORD; 7298 } 7299 else 7300 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7301 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)]; 7302 7303 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); 7304 } 7305 7306 real_from_target (&r, tmp, mode); 7307 return build_real (type, r); 7308} 7309 7310 7311/* Subroutine of native_interpret_expr. Interpret the contents of 7312 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE. 7313 If the buffer cannot be interpreted, return NULL_TREE. 
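The real part is read first and the imaginary part follows it, matching the layout produced by native_encode_complex.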
*/ 7314 7315static tree 7316native_interpret_complex (tree type, unsigned char *ptr, int len) 7317{ 7318 tree etype, rpart, ipart; 7319 int size; 7320 7321 etype = TREE_TYPE (type); 7322 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7323 if (size * 2 > len) 7324 return NULL_TREE; 7325 rpart = native_interpret_expr (etype, ptr, size); 7326 if (!rpart) 7327 return NULL_TREE; 7328 ipart = native_interpret_expr (etype, ptr+size, size); 7329 if (!ipart) 7330 return NULL_TREE; 7331 return build_complex (type, rpart, ipart); 7332} 7333 7334 7335/* Subroutine of native_interpret_expr. Interpret the contents of 7336 the buffer PTR of length LEN as a VECTOR_CST of type TYPE. 7337 If the buffer cannot be interpreted, return NULL_TREE. */ 7338 7339static tree 7340native_interpret_vector (tree type, unsigned char *ptr, int len) 7341{ 7342 tree etype, elem, elements; 7343 int i, size, count; 7344 7345 etype = TREE_TYPE (type); 7346 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7347 count = TYPE_VECTOR_SUBPARTS (type); 7348 if (size * count > len) 7349 return NULL_TREE; 7350 7351 elements = NULL_TREE; 7352 for (i = count - 1; i >= 0; i--) 7353 { 7354 elem = native_interpret_expr (etype, ptr+(i*size), size); 7355 if (!elem) 7356 return NULL_TREE; 7357 elements = tree_cons (NULL_TREE, elem, elements); 7358 } 7359 return build_vector (type, elements); 7360} 7361 7362 7363/* Subroutine of fold_view_convert_expr. Interpret the contents of 7364 the buffer PTR of length LEN as a constant of type TYPE. For 7365 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P 7366 we return a REAL_CST, etc... If the buffer cannot be interpreted, 7367 return NULL_TREE. */ 7368 7369static tree 7370native_interpret_expr (tree type, unsigned char *ptr, int len) 7371{ 7372 switch (TREE_CODE (type)) 7373 { 7374 case INTEGER_TYPE: 7375 case ENUMERAL_TYPE: 7376 case BOOLEAN_TYPE: 7377 return native_interpret_int (type, ptr, len); 7378 7379 case REAL_TYPE: 7380 return native_interpret_real (type, ptr, len); 7381 7382 case COMPLEX_TYPE: 7383 return native_interpret_complex (type, ptr, len); 7384 7385 case VECTOR_TYPE: 7386 return native_interpret_vector (type, ptr, len); 7387 7388 default: 7389 return NULL_TREE; 7390 } 7391} 7392 7393 7394/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type 7395 TYPE at compile-time. If we're unable to perform the conversion 7396 return NULL_TREE. */ 7397 7398static tree 7399fold_view_convert_expr (tree type, tree expr) 7400{ 7401 /* We support up to 512-bit values (for V8DFmode). */ 7402 unsigned char buffer[64]; 7403 int len; 7404 7405 /* Check that the host and target are sane. */ 7406 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8) 7407 return NULL_TREE; 7408 7409 len = native_encode_expr (expr, buffer, sizeof (buffer)); 7410 if (len == 0) 7411 return NULL_TREE; 7412 7413 return native_interpret_expr (type, buffer, len); 7414} 7415 7416 7417/* Fold a unary expression of code CODE and type TYPE with operand 7418 OP0. Return the folded expression if folding is successful. 7419 Otherwise, return NULL_TREE. */ 7420 7421tree 7422fold_unary (enum tree_code code, tree type, tree op0) 7423{ 7424 tree tem; 7425 tree arg0; 7426 enum tree_code_class kind = TREE_CODE_CLASS (code); 7427 7428 gcc_assert (IS_EXPR_CODE_CLASS (kind) 7429 && TREE_CODE_LENGTH (code) == 1); 7430 7431 arg0 = op0; 7432 if (arg0) 7433 { 7434 if (code == NOP_EXPR || code == CONVERT_EXPR 7435 || code == FLOAT_EXPR || code == ABS_EXPR) 7436 { 7437 /* Don't use STRIP_NOPS, because signedness of argument type 7438 matters. 
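   For example, (double) (unsigned int) x and (double) (int) x differ
   whenever x has its sign bit set, so a FLOAT_EXPR operand must keep
   the signedness of its type while being stripped.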
*/ 7439 STRIP_SIGN_NOPS (arg0); 7440 } 7441 else 7442 { 7443 /* Strip any conversions that don't change the mode. This 7444 is safe for every expression, except for a comparison 7445 expression because its signedness is derived from its 7446 operands. 7447 7448 Note that this is done as an internal manipulation within 7449 the constant folder, in order to find the simplest 7450 representation of the arguments so that their form can be 7451 studied. In any cases, the appropriate type conversions 7452 should be put back in the tree that will get out of the 7453 constant folder. */ 7454 STRIP_NOPS (arg0); 7455 } 7456 } 7457 7458 if (TREE_CODE_CLASS (code) == tcc_unary) 7459 { 7460 if (TREE_CODE (arg0) == COMPOUND_EXPR) 7461 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 7462 fold_build1 (code, type, TREE_OPERAND (arg0, 1))); 7463 else if (TREE_CODE (arg0) == COND_EXPR) 7464 { 7465 tree arg01 = TREE_OPERAND (arg0, 1); 7466 tree arg02 = TREE_OPERAND (arg0, 2); 7467 if (! VOID_TYPE_P (TREE_TYPE (arg01))) 7468 arg01 = fold_build1 (code, type, arg01); 7469 if (! VOID_TYPE_P (TREE_TYPE (arg02))) 7470 arg02 = fold_build1 (code, type, arg02); 7471 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0), 7472 arg01, arg02); 7473 7474 /* If this was a conversion, and all we did was to move into 7475 inside the COND_EXPR, bring it back out. But leave it if 7476 it is a conversion from integer to integer and the 7477 result precision is no wider than a word since such a 7478 conversion is cheap and may be optimized away by combine, 7479 while it couldn't if it were outside the COND_EXPR. Then return 7480 so we don't get into an infinite recursion loop taking the 7481 conversion out and then back in. */ 7482 7483 if ((code == NOP_EXPR || code == CONVERT_EXPR 7484 || code == NON_LVALUE_EXPR) 7485 && TREE_CODE (tem) == COND_EXPR 7486 && TREE_CODE (TREE_OPERAND (tem, 1)) == code 7487 && TREE_CODE (TREE_OPERAND (tem, 2)) == code 7488 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1)) 7489 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2)) 7490 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)) 7491 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0))) 7492 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 7493 && (INTEGRAL_TYPE_P 7494 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)))) 7495 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD) 7496 || flag_syntax_only)) 7497 tem = build1 (code, type, 7498 build3 (COND_EXPR, 7499 TREE_TYPE (TREE_OPERAND 7500 (TREE_OPERAND (tem, 1), 0)), 7501 TREE_OPERAND (tem, 0), 7502 TREE_OPERAND (TREE_OPERAND (tem, 1), 0), 7503 TREE_OPERAND (TREE_OPERAND (tem, 2), 0))); 7504 return tem; 7505 } 7506 else if (COMPARISON_CLASS_P (arg0)) 7507 { 7508 if (TREE_CODE (type) == BOOLEAN_TYPE) 7509 { 7510 arg0 = copy_node (arg0); 7511 TREE_TYPE (arg0) = type; 7512 return arg0; 7513 } 7514 else if (TREE_CODE (type) != INTEGER_TYPE) 7515 return fold_build3 (COND_EXPR, type, arg0, 7516 fold_build1 (code, type, 7517 integer_one_node), 7518 fold_build1 (code, type, 7519 integer_zero_node)); 7520 } 7521 } 7522 7523 switch (code) 7524 { 7525 case NOP_EXPR: 7526 case FLOAT_EXPR: 7527 case CONVERT_EXPR: 7528 case FIX_TRUNC_EXPR: 7529 case FIX_CEIL_EXPR: 7530 case FIX_FLOOR_EXPR: 7531 case FIX_ROUND_EXPR: 7532 if (TREE_TYPE (op0) == type) 7533 return op0; 7534 7535 /* If we have (type) (a CMP b) and type is an integral type, return 7536 new expression involving the new type. 
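   E.g. (long) (a < b) can simply be rebuilt as the comparison a < b
   computed directly in type long, since a comparison always yields
   0 or 1.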
*/ 7537 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type)) 7538 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0), 7539 TREE_OPERAND (op0, 1)); 7540 7541 /* Handle cases of two conversions in a row. */ 7542 if (TREE_CODE (op0) == NOP_EXPR 7543 || TREE_CODE (op0) == CONVERT_EXPR) 7544 { 7545 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0)); 7546 tree inter_type = TREE_TYPE (op0); 7547 int inside_int = INTEGRAL_TYPE_P (inside_type); 7548 int inside_ptr = POINTER_TYPE_P (inside_type); 7549 int inside_float = FLOAT_TYPE_P (inside_type); 7550 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE; 7551 unsigned int inside_prec = TYPE_PRECISION (inside_type); 7552 int inside_unsignedp = TYPE_UNSIGNED (inside_type); 7553 int inter_int = INTEGRAL_TYPE_P (inter_type); 7554 int inter_ptr = POINTER_TYPE_P (inter_type); 7555 int inter_float = FLOAT_TYPE_P (inter_type); 7556 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE; 7557 unsigned int inter_prec = TYPE_PRECISION (inter_type); 7558 int inter_unsignedp = TYPE_UNSIGNED (inter_type); 7559 int final_int = INTEGRAL_TYPE_P (type); 7560 int final_ptr = POINTER_TYPE_P (type); 7561 int final_float = FLOAT_TYPE_P (type); 7562 int final_vec = TREE_CODE (type) == VECTOR_TYPE; 7563 unsigned int final_prec = TYPE_PRECISION (type); 7564 int final_unsignedp = TYPE_UNSIGNED (type); 7565 7566 /* In addition to the cases of two conversions in a row 7567 handled below, if we are converting something to its own 7568 type via an object of identical or wider precision, neither 7569 conversion is needed. */ 7570 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type) 7571 && (((inter_int || inter_ptr) && final_int) 7572 || (inter_float && final_float)) 7573 && inter_prec >= final_prec) 7574 return fold_build1 (code, type, TREE_OPERAND (op0, 0)); 7575 7576 /* Likewise, if the intermediate and final types are either both 7577 float or both integer, we don't need the middle conversion if 7578 it is wider than the final type and doesn't change the signedness 7579 (for integers). Avoid this if the final type is a pointer 7580 since then we sometimes need the inner conversion. Likewise if 7581 the outer has a precision not equal to the size of its mode. */ 7582 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr)) 7583 || (inter_float && inside_float) 7584 || (inter_vec && inside_vec)) 7585 && inter_prec >= inside_prec 7586 && (inter_float || inter_vec 7587 || inter_unsignedp == inside_unsignedp) 7588 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 7589 && TYPE_MODE (type) == TYPE_MODE (inter_type)) 7590 && ! final_ptr 7591 && (! final_vec || inter_prec == inside_prec)) 7592 return fold_build1 (code, type, TREE_OPERAND (op0, 0)); 7593 7594 /* If we have a sign-extension of a zero-extended value, we can 7595 replace that by a single zero-extension. 
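   E.g. for (long long) (int) (unsigned char) c the inner conversion
   zero-extends and therefore guarantees a non-negative value, so the
   whole chain can be performed as a single zero-extension from the
   innermost type.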
*/ 7596 if (inside_int && inter_int && final_int 7597 && inside_prec < inter_prec && inter_prec < final_prec 7598 && inside_unsignedp && !inter_unsignedp) 7599 return fold_build1 (code, type, TREE_OPERAND (op0, 0)); 7600 7601 /* Two conversions in a row are not needed unless: 7602 - some conversion is floating-point (overstrict for now), or 7603 - some conversion is a vector (overstrict for now), or 7604 - the intermediate type is narrower than both initial and 7605 final, or 7606 - the intermediate type and innermost type differ in signedness, 7607 and the outermost type is wider than the intermediate, or 7608 - the initial type is a pointer type and the precisions of the 7609 intermediate and final types differ, or 7610 - the final type is a pointer type and the precisions of the 7611 initial and intermediate types differ. 7612 - the final type is a pointer type and the initial type not 7613 - the initial type is a pointer to an array and the final type 7614 not. */ 7615 /* Java pointer type conversions generate checks in some 7616 cases, so we explicitly disallow this optimization. */ 7617 if (! inside_float && ! inter_float && ! final_float 7618 && ! inside_vec && ! inter_vec && ! final_vec 7619 && (inter_prec >= inside_prec || inter_prec >= final_prec) 7620 && ! (inside_int && inter_int 7621 && inter_unsignedp != inside_unsignedp 7622 && inter_prec < final_prec) 7623 && ((inter_unsignedp && inter_prec > inside_prec) 7624 == (final_unsignedp && final_prec > inter_prec)) 7625 && ! (inside_ptr && inter_prec != final_prec) 7626 && ! (final_ptr && inside_prec != inter_prec) 7627 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 7628 && TYPE_MODE (type) == TYPE_MODE (inter_type)) 7629 && final_ptr == inside_ptr 7630 && ! (inside_ptr 7631 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE 7632 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE) 7633 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0) 7634 && final_ptr)) 7635 return fold_build1 (code, type, TREE_OPERAND (op0, 0)); 7636 } 7637 7638 /* Handle (T *)&A.B.C for A being of type T and B and C 7639 living at offset zero. This occurs frequently in 7640 C++ upcasting and then accessing the base. */ 7641 if (TREE_CODE (op0) == ADDR_EXPR 7642 && POINTER_TYPE_P (type) 7643 && handled_component_p (TREE_OPERAND (op0, 0))) 7644 { 7645 HOST_WIDE_INT bitsize, bitpos; 7646 tree offset; 7647 enum machine_mode mode; 7648 int unsignedp, volatilep; 7649 tree base = TREE_OPERAND (op0, 0); 7650 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 7651 &mode, &unsignedp, &volatilep, false); 7652 /* If the reference was to a (constant) zero offset, we can use 7653 the address of the base if it has the same base type 7654 as the result type. */ 7655 if (! offset && bitpos == 0 7656 && TYPE_MAIN_VARIANT (TREE_TYPE (type)) 7657 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 7658 return fold_convert (type, build_fold_addr_expr (base)); 7659 } 7660 7661 if (TREE_CODE (op0) == MODIFY_EXPR 7662 && TREE_CONSTANT (TREE_OPERAND (op0, 1)) 7663 /* Detect assigning a bitfield. */ 7664 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF 7665 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1)))) 7666 { 7667 /* Don't leave an assignment inside a conversion 7668 unless assigning a bitfield. */ 7669 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1)); 7670 /* First do the assignment, then return converted constant. 
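   Roughly, (long) (x = 3) becomes the COMPOUND_EXPR (x = 3, (long) 3):
   the assignment is kept for its side effect and the already-converted
   constant supplies the value.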
*/ 7671 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); 7672 TREE_NO_WARNING (tem) = 1; 7673 TREE_USED (tem) = 1; 7674 return tem; 7675 } 7676 7677 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer 7678 constants (if x has signed type, the sign bit cannot be set 7679 in c). This folds extension into the BIT_AND_EXPR. */ 7680 if (INTEGRAL_TYPE_P (type) 7681 && TREE_CODE (type) != BOOLEAN_TYPE 7682 && TREE_CODE (op0) == BIT_AND_EXPR 7683 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST) 7684 { 7685 tree and = op0; 7686 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1); 7687 int change = 0; 7688 7689 if (TYPE_UNSIGNED (TREE_TYPE (and)) 7690 || (TYPE_PRECISION (type) 7691 <= TYPE_PRECISION (TREE_TYPE (and)))) 7692 change = 1; 7693 else if (TYPE_PRECISION (TREE_TYPE (and1)) 7694 <= HOST_BITS_PER_WIDE_INT 7695 && host_integerp (and1, 1)) 7696 { 7697 unsigned HOST_WIDE_INT cst; 7698 7699 cst = tree_low_cst (and1, 1); 7700 cst &= (HOST_WIDE_INT) -1 7701 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1); 7702 change = (cst == 0); 7703#ifdef LOAD_EXTEND_OP 7704 if (change 7705 && !flag_syntax_only 7706 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0))) 7707 == ZERO_EXTEND)) 7708 { 7709 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0)); 7710 and0 = fold_convert (uns, and0); 7711 and1 = fold_convert (uns, and1); 7712 } 7713#endif 7714 } 7715 if (change) 7716 { 7717 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1), 7718 TREE_INT_CST_HIGH (and1)); 7719 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1), 7720 TREE_CONSTANT_OVERFLOW (and1)); 7721 return fold_build2 (BIT_AND_EXPR, type, 7722 fold_convert (type, and0), tem); 7723 } 7724 } 7725 7726 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and 7727 T2 being pointers to types of the same size. */ 7728 if (POINTER_TYPE_P (type) 7729 && BINARY_CLASS_P (arg0) 7730 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR 7731 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 7732 { 7733 tree arg00 = TREE_OPERAND (arg0, 0); 7734 tree t0 = type; 7735 tree t1 = TREE_TYPE (arg00); 7736 tree tt0 = TREE_TYPE (t0); 7737 tree tt1 = TREE_TYPE (t1); 7738 tree s0 = TYPE_SIZE (tt0); 7739 tree s1 = TYPE_SIZE (tt1); 7740 7741 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST)) 7742 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00), 7743 TREE_OPERAND (arg0, 1)); 7744 } 7745 7746 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types 7747 of the same precision, and X is a integer type not narrower than 7748 types T1 or T2, i.e. the cast (T2)X isn't an extension. */ 7749 if (INTEGRAL_TYPE_P (type) 7750 && TREE_CODE (op0) == BIT_NOT_EXPR 7751 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 7752 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR 7753 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR) 7754 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 7755 { 7756 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0); 7757 if (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 7758 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem))) 7759 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem)); 7760 } 7761 7762 tem = fold_convert_const (code, type, op0); 7763 return tem ? 
tem : NULL_TREE; 7764 7765 case VIEW_CONVERT_EXPR: 7766 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) 7767 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0)); 7768 return fold_view_convert_expr (type, op0); 7769 7770 case NEGATE_EXPR: 7771 tem = fold_negate_expr (arg0); 7772 if (tem) 7773 return fold_convert (type, tem); 7774 return NULL_TREE; 7775 7776 case ABS_EXPR: 7777 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) 7778 return fold_abs_const (arg0, type); 7779 else if (TREE_CODE (arg0) == NEGATE_EXPR) 7780 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)); 7781 /* Convert fabs((double)float) into (double)fabsf(float). */ 7782 else if (TREE_CODE (arg0) == NOP_EXPR 7783 && TREE_CODE (type) == REAL_TYPE) 7784 { 7785 tree targ0 = strip_float_extensions (arg0); 7786 if (targ0 != arg0) 7787 return fold_convert (type, fold_build1 (ABS_EXPR, 7788 TREE_TYPE (targ0), 7789 targ0)); 7790 } 7791 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */ 7792 else if (TREE_CODE (arg0) == ABS_EXPR) 7793 return arg0; 7794 else if (tree_expr_nonnegative_p (arg0)) 7795 return arg0; 7796 7797 /* Strip sign ops from argument. */ 7798 if (TREE_CODE (type) == REAL_TYPE) 7799 { 7800 tem = fold_strip_sign_ops (arg0); 7801 if (tem) 7802 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem)); 7803 } 7804 return NULL_TREE; 7805 7806 case CONJ_EXPR: 7807 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 7808 return fold_convert (type, arg0); 7809 if (TREE_CODE (arg0) == COMPLEX_EXPR) 7810 { 7811 tree itype = TREE_TYPE (type); 7812 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0)); 7813 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1)); 7814 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart)); 7815 } 7816 if (TREE_CODE (arg0) == COMPLEX_CST) 7817 { 7818 tree itype = TREE_TYPE (type); 7819 tree rpart = fold_convert (itype, TREE_REALPART (arg0)); 7820 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0)); 7821 return build_complex (type, rpart, negate_expr (ipart)); 7822 } 7823 if (TREE_CODE (arg0) == CONJ_EXPR) 7824 return fold_convert (type, TREE_OPERAND (arg0, 0)); 7825 return NULL_TREE; 7826 7827 case BIT_NOT_EXPR: 7828 if (TREE_CODE (arg0) == INTEGER_CST) 7829 return fold_not_const (arg0, type); 7830 else if (TREE_CODE (arg0) == BIT_NOT_EXPR) 7831 return TREE_OPERAND (arg0, 0); 7832 /* Convert ~ (-A) to A - 1. */ 7833 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR) 7834 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0), 7835 build_int_cst (type, 1)); 7836 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */ 7837 else if (INTEGRAL_TYPE_P (type) 7838 && ((TREE_CODE (arg0) == MINUS_EXPR 7839 && integer_onep (TREE_OPERAND (arg0, 1))) 7840 || (TREE_CODE (arg0) == PLUS_EXPR 7841 && integer_all_onesp (TREE_OPERAND (arg0, 1))))) 7842 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)); 7843 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. 
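   The typical case has a constant operand, e.g. ~(X ^ C) becomes
   X ^ C' where C' is the constant ~C produced by the recursive
   fold_unary call.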
*/ 7844 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 7845 && (tem = fold_unary (BIT_NOT_EXPR, type, 7846 fold_convert (type, 7847 TREE_OPERAND (arg0, 0))))) 7848 return fold_build2 (BIT_XOR_EXPR, type, tem, 7849 fold_convert (type, TREE_OPERAND (arg0, 1))); 7850 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 7851 && (tem = fold_unary (BIT_NOT_EXPR, type, 7852 fold_convert (type, 7853 TREE_OPERAND (arg0, 1))))) 7854 return fold_build2 (BIT_XOR_EXPR, type, 7855 fold_convert (type, TREE_OPERAND (arg0, 0)), tem); 7856 7857 return NULL_TREE; 7858 7859 case TRUTH_NOT_EXPR: 7860 /* The argument to invert_truthvalue must have Boolean type. */ 7861 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 7862 arg0 = fold_convert (boolean_type_node, arg0); 7863 7864 /* Note that the operand of this must be an int 7865 and its values must be 0 or 1. 7866 ("true" is a fixed value perhaps depending on the language, 7867 but we don't handle values other than 1 correctly yet.) */ 7868 tem = fold_truth_not_expr (arg0); 7869 if (!tem) 7870 return NULL_TREE; 7871 return fold_convert (type, tem); 7872 7873 case REALPART_EXPR: 7874 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 7875 return fold_convert (type, arg0); 7876 if (TREE_CODE (arg0) == COMPLEX_EXPR) 7877 return omit_one_operand (type, TREE_OPERAND (arg0, 0), 7878 TREE_OPERAND (arg0, 1)); 7879 if (TREE_CODE (arg0) == COMPLEX_CST) 7880 return fold_convert (type, TREE_REALPART (arg0)); 7881 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 7882 { 7883 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 7884 tem = fold_build2 (TREE_CODE (arg0), itype, 7885 fold_build1 (REALPART_EXPR, itype, 7886 TREE_OPERAND (arg0, 0)), 7887 fold_build1 (REALPART_EXPR, itype, 7888 TREE_OPERAND (arg0, 1))); 7889 return fold_convert (type, tem); 7890 } 7891 if (TREE_CODE (arg0) == CONJ_EXPR) 7892 { 7893 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 7894 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0)); 7895 return fold_convert (type, tem); 7896 } 7897 return NULL_TREE; 7898 7899 case IMAGPART_EXPR: 7900 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 7901 return fold_convert (type, integer_zero_node); 7902 if (TREE_CODE (arg0) == COMPLEX_EXPR) 7903 return omit_one_operand (type, TREE_OPERAND (arg0, 1), 7904 TREE_OPERAND (arg0, 0)); 7905 if (TREE_CODE (arg0) == COMPLEX_CST) 7906 return fold_convert (type, TREE_IMAGPART (arg0)); 7907 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 7908 { 7909 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 7910 tem = fold_build2 (TREE_CODE (arg0), itype, 7911 fold_build1 (IMAGPART_EXPR, itype, 7912 TREE_OPERAND (arg0, 0)), 7913 fold_build1 (IMAGPART_EXPR, itype, 7914 TREE_OPERAND (arg0, 1))); 7915 return fold_convert (type, tem); 7916 } 7917 if (TREE_CODE (arg0) == CONJ_EXPR) 7918 { 7919 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 7920 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); 7921 return fold_convert (type, negate_expr (tem)); 7922 } 7923 return NULL_TREE; 7924 7925 default: 7926 return NULL_TREE; 7927 } /* switch (code) */ 7928} 7929 7930/* Fold a binary expression of code CODE and type TYPE with operands 7931 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination. 7932 Return the folded expression if folding is successful. Otherwise, 7933 return NULL_TREE. 
*/ 7934 7935static tree 7936fold_minmax (enum tree_code code, tree type, tree op0, tree op1) 7937{ 7938 enum tree_code compl_code; 7939 7940 if (code == MIN_EXPR) 7941 compl_code = MAX_EXPR; 7942 else if (code == MAX_EXPR) 7943 compl_code = MIN_EXPR; 7944 else 7945 gcc_unreachable (); 7946 7947 /* MIN (MAX (a, b), b) == b. */ 7948 if (TREE_CODE (op0) == compl_code 7949 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0)) 7950 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0)); 7951 7952 /* MIN (MAX (b, a), b) == b. */ 7953 if (TREE_CODE (op0) == compl_code 7954 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0) 7955 && reorder_operands_p (TREE_OPERAND (op0, 1), op1)) 7956 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1)); 7957 7958 /* MIN (a, MAX (a, b)) == a. */ 7959 if (TREE_CODE (op1) == compl_code 7960 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0) 7961 && reorder_operands_p (op0, TREE_OPERAND (op1, 1))) 7962 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1)); 7963 7964 /* MIN (a, MAX (b, a)) == a. */ 7965 if (TREE_CODE (op1) == compl_code 7966 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0) 7967 && reorder_operands_p (op0, TREE_OPERAND (op1, 0))) 7968 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0)); 7969 7970 return NULL_TREE; 7971} 7972 7973/* Subroutine of fold_binary. This routine performs all of the 7974 transformations that are common to the equality/inequality 7975 operators (EQ_EXPR and NE_EXPR) and the ordering operators 7976 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than 7977 fold_binary should call fold_binary. Fold a comparison with 7978 tree code CODE and type TYPE with operands OP0 and OP1. Return 7979 the folded comparison or NULL_TREE. */ 7980 7981static tree 7982fold_comparison (enum tree_code code, tree type, tree op0, tree op1) 7983{ 7984 tree arg0, arg1, tem; 7985 7986 arg0 = op0; 7987 arg1 = op1; 7988 7989 STRIP_SIGN_NOPS (arg0); 7990 STRIP_SIGN_NOPS (arg1); 7991 7992 tem = fold_relational_const (code, type, arg0, arg1); 7993 if (tem != NULL_TREE) 7994 return tem; 7995 7996 /* If one arg is a real or integer constant, put it last. */ 7997 if (tree_swap_operands_p (arg0, arg1, true)) 7998 return fold_build2 (swap_tree_comparison (code), type, op1, op0); 7999 8000 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */ 8001 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8002 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8003 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 8004 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 8005 && (TREE_CODE (arg1) == INTEGER_CST 8006 && !TREE_OVERFLOW (arg1))) 8007 { 8008 tree const1 = TREE_OPERAND (arg0, 1); 8009 tree const2 = arg1; 8010 tree variable = TREE_OPERAND (arg0, 0); 8011 tree lhs; 8012 int lhs_add; 8013 lhs_add = TREE_CODE (arg0) != PLUS_EXPR; 8014 8015 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR, 8016 TREE_TYPE (arg1), const2, const1); 8017 if (TREE_CODE (lhs) == TREE_CODE (arg1) 8018 && (TREE_CODE (lhs) != INTEGER_CST 8019 || !TREE_OVERFLOW (lhs))) 8020 { 8021 fold_overflow_warning (("assuming signed overflow does not occur " 8022 "when changing X +- C1 cmp C2 to " 8023 "X cmp C1 +- C2"), 8024 WARN_STRICT_OVERFLOW_COMPARISON); 8025 return fold_build2 (code, type, variable, lhs); 8026 } 8027 } 8028 8029 /* If this is a comparison of two exprs that look like an ARRAY_REF of the 8030 same object, then we can fold this to a comparison of the two offsets in 8031 signed size type. 
This is possible because pointer arithmetic is 8032 restricted to retain within an object and overflow on pointer differences 8033 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. 8034 8035 We check flag_wrapv directly because pointers types are unsigned, 8036 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is 8037 normally what we want to avoid certain odd overflow cases, but 8038 not here. */ 8039 if (POINTER_TYPE_P (TREE_TYPE (arg0)) 8040 && !flag_wrapv 8041 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0))) 8042 { 8043 tree base0, offset0, base1, offset1; 8044 8045 if (extract_array_ref (arg0, &base0, &offset0) 8046 && extract_array_ref (arg1, &base1, &offset1) 8047 && operand_equal_p (base0, base1, 0)) 8048 { 8049 tree signed_size_type_node; 8050 signed_size_type_node = signed_type_for (size_type_node); 8051 8052 /* By converting to signed size type we cover middle-end pointer 8053 arithmetic which operates on unsigned pointer types of size 8054 type size and ARRAY_REF offsets which are properly sign or 8055 zero extended from their type in case it is narrower than 8056 size type. */ 8057 if (offset0 == NULL_TREE) 8058 offset0 = build_int_cst (signed_size_type_node, 0); 8059 else 8060 offset0 = fold_convert (signed_size_type_node, offset0); 8061 if (offset1 == NULL_TREE) 8062 offset1 = build_int_cst (signed_size_type_node, 0); 8063 else 8064 offset1 = fold_convert (signed_size_type_node, offset1); 8065 8066 return fold_build2 (code, type, offset0, offset1); 8067 } 8068 } 8069 8070 if (FLOAT_TYPE_P (TREE_TYPE (arg0))) 8071 { 8072 tree targ0 = strip_float_extensions (arg0); 8073 tree targ1 = strip_float_extensions (arg1); 8074 tree newtype = TREE_TYPE (targ0); 8075 8076 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 8077 newtype = TREE_TYPE (targ1); 8078 8079 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 8080 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 8081 return fold_build2 (code, type, fold_convert (newtype, targ0), 8082 fold_convert (newtype, targ1)); 8083 8084 /* (-a) CMP (-b) -> b CMP a */ 8085 if (TREE_CODE (arg0) == NEGATE_EXPR 8086 && TREE_CODE (arg1) == NEGATE_EXPR) 8087 return fold_build2 (code, type, TREE_OPERAND (arg1, 0), 8088 TREE_OPERAND (arg0, 0)); 8089 8090 if (TREE_CODE (arg1) == REAL_CST) 8091 { 8092 REAL_VALUE_TYPE cst; 8093 cst = TREE_REAL_CST (arg1); 8094 8095 /* (-a) CMP CST -> a swap(CMP) (-CST) */ 8096 if (TREE_CODE (arg0) == NEGATE_EXPR) 8097 return fold_build2 (swap_tree_comparison (code), type, 8098 TREE_OPERAND (arg0, 0), 8099 build_real (TREE_TYPE (arg1), 8100 REAL_VALUE_NEGATE (cst))); 8101 8102 /* IEEE doesn't distinguish +0 and -0 in comparisons. */ 8103 /* a CMP (-0) -> a CMP 0 */ 8104 if (REAL_VALUE_MINUS_ZERO (cst)) 8105 return fold_build2 (code, type, arg0, 8106 build_real (TREE_TYPE (arg1), dconst0)); 8107 8108 /* x != NaN is always true, other ops are always false. */ 8109 if (REAL_VALUE_ISNAN (cst) 8110 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))) 8111 { 8112 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node; 8113 return omit_one_operand (type, tem, arg0); 8114 } 8115 8116 /* Fold comparisons against infinity. 
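   E.g. x > +Inf can never hold, and x <= +Inf holds whenever x is not
   a NaN; fold_inf_compare applies rewrites of this kind, subject to
   the usual checks on whether infinities and NaNs must be honored for
   the mode.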
*/ 8117 if (REAL_VALUE_ISINF (cst)) 8118 { 8119 tem = fold_inf_compare (code, type, arg0, arg1); 8120 if (tem != NULL_TREE) 8121 return tem; 8122 } 8123 } 8124 8125 /* If this is a comparison of a real constant with a PLUS_EXPR 8126 or a MINUS_EXPR of a real constant, we can convert it into a 8127 comparison with a revised real constant as long as no overflow 8128 occurs when unsafe_math_optimizations are enabled. */ 8129 if (flag_unsafe_math_optimizations 8130 && TREE_CODE (arg1) == REAL_CST 8131 && (TREE_CODE (arg0) == PLUS_EXPR 8132 || TREE_CODE (arg0) == MINUS_EXPR) 8133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 8134 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 8135 ? MINUS_EXPR : PLUS_EXPR, 8136 arg1, TREE_OPERAND (arg0, 1), 0)) 8137 && ! TREE_CONSTANT_OVERFLOW (tem)) 8138 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); 8139 8140 /* Likewise, we can simplify a comparison of a real constant with 8141 a MINUS_EXPR whose first operand is also a real constant, i.e. 8142 (c1 - x) < c2 becomes x > c1-c2. */ 8143 if (flag_unsafe_math_optimizations 8144 && TREE_CODE (arg1) == REAL_CST 8145 && TREE_CODE (arg0) == MINUS_EXPR 8146 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST 8147 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), 8148 arg1, 0)) 8149 && ! TREE_CONSTANT_OVERFLOW (tem)) 8150 return fold_build2 (swap_tree_comparison (code), type, 8151 TREE_OPERAND (arg0, 1), tem); 8152 8153 /* Fold comparisons against built-in math functions. */ 8154 if (TREE_CODE (arg1) == REAL_CST 8155 && flag_unsafe_math_optimizations 8156 && ! flag_errno_math) 8157 { 8158 enum built_in_function fcode = builtin_mathfn_code (arg0); 8159 8160 if (fcode != END_BUILTINS) 8161 { 8162 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1); 8163 if (tem != NULL_TREE) 8164 return tem; 8165 } 8166 } 8167 } 8168 8169 /* Convert foo++ == CONST into ++foo == CONST + INCR. */ 8170 if (TREE_CONSTANT (arg1) 8171 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR 8172 || TREE_CODE (arg0) == POSTDECREMENT_EXPR) 8173 /* This optimization is invalid for ordered comparisons 8174 if CONST+INCR overflows or if foo+incr might overflow. 8175 This optimization is invalid for floating point due to rounding. 8176 For pointer types we assume overflow doesn't happen. */ 8177 && (POINTER_TYPE_P (TREE_TYPE (arg0)) 8178 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 8179 && (code == EQ_EXPR || code == NE_EXPR)))) 8180 { 8181 tree varop, newconst; 8182 8183 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR) 8184 { 8185 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0), 8186 arg1, TREE_OPERAND (arg0, 1)); 8187 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0), 8188 TREE_OPERAND (arg0, 0), 8189 TREE_OPERAND (arg0, 1)); 8190 } 8191 else 8192 { 8193 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0), 8194 arg1, TREE_OPERAND (arg0, 1)); 8195 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0), 8196 TREE_OPERAND (arg0, 0), 8197 TREE_OPERAND (arg0, 1)); 8198 } 8199 8200 8201 /* If VAROP is a reference to a bitfield, we must mask 8202 the constant by the width of the field. 
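   The masking below is done by shifting NEWCONST left and then right
   again by TYPE_PRECISION (TREE_TYPE (varop)) - size bits, discarding
   the bits that cannot be stored in the field and extending the rest
   according to the signedness of VAROP's type.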
*/ 8203 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF 8204 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)) 8205 && host_integerp (DECL_SIZE (TREE_OPERAND 8206 (TREE_OPERAND (varop, 0), 1)), 1)) 8207 { 8208 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1); 8209 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1); 8210 tree folded_compare, shift; 8211 8212 /* First check whether the comparison would come out 8213 always the same. If we don't do that we would 8214 change the meaning with the masking. */ 8215 folded_compare = fold_build2 (code, type, 8216 TREE_OPERAND (varop, 0), arg1); 8217 if (TREE_CODE (folded_compare) == INTEGER_CST) 8218 return omit_one_operand (type, folded_compare, varop); 8219 8220 shift = build_int_cst (NULL_TREE, 8221 TYPE_PRECISION (TREE_TYPE (varop)) - size); 8222 shift = fold_convert (TREE_TYPE (varop), shift); 8223 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop), 8224 newconst, shift); 8225 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop), 8226 newconst, shift); 8227 } 8228 8229 return fold_build2 (code, type, varop, newconst); 8230 } 8231 8232 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE 8233 && (TREE_CODE (arg0) == NOP_EXPR 8234 || TREE_CODE (arg0) == CONVERT_EXPR)) 8235 { 8236 /* If we are widening one operand of an integer comparison, 8237 see if the other operand is similarly being widened. Perhaps we 8238 can do the comparison in the narrower type. */ 8239 tem = fold_widened_comparison (code, type, arg0, arg1); 8240 if (tem) 8241 return tem; 8242 8243 /* Or if we are changing signedness. */ 8244 tem = fold_sign_changed_comparison (code, type, arg0, arg1); 8245 if (tem) 8246 return tem; 8247 } 8248 8249 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a 8250 constant, we can simplify it. */ 8251 if (TREE_CODE (arg1) == INTEGER_CST 8252 && (TREE_CODE (arg0) == MIN_EXPR 8253 || TREE_CODE (arg0) == MAX_EXPR) 8254 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 8255 { 8256 tem = optimize_minmax_comparison (code, type, op0, op1); 8257 if (tem) 8258 return tem; 8259 } 8260 8261 /* Simplify comparison of something with itself. (For IEEE 8262 floating-point, we can only do some of these simplifications.) */ 8263 if (operand_equal_p (arg0, arg1, 0)) 8264 { 8265 switch (code) 8266 { 8267 case EQ_EXPR: 8268 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 8269 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 8270 return constant_boolean_node (1, type); 8271 break; 8272 8273 case GE_EXPR: 8274 case LE_EXPR: 8275 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 8276 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 8277 return constant_boolean_node (1, type); 8278 return fold_build2 (EQ_EXPR, type, arg0, arg1); 8279 8280 case NE_EXPR: 8281 /* For NE, we can only do this simplification if integer 8282 or we don't honor IEEE floating point NaNs. */ 8283 if (FLOAT_TYPE_P (TREE_TYPE (arg0)) 8284 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 8285 break; 8286 /* ... fall through ... */ 8287 case GT_EXPR: 8288 case LT_EXPR: 8289 return constant_boolean_node (0, type); 8290 default: 8291 gcc_unreachable (); 8292 } 8293 } 8294 8295 /* If we are comparing an expression that just has comparisons 8296 of two integer values, arithmetic expressions of those comparisons, 8297 and constants, we can simplify it. There are only three cases 8298 to check: the two values can either be equal, the first can be 8299 greater, or the second can be greater. Fold the expression for 8300 those three values. 
Since each value must be 0 or 1, we have 8301 eight possibilities, each of which corresponds to the constant 0 8302 or 1 or one of the six possible comparisons. 8303 8304 This handles common cases like (a > b) == 0 but also handles 8305 expressions like ((x > y) - (y > x)) > 0, which supposedly 8306 occur in macroized code. */ 8307 8308 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST) 8309 { 8310 tree cval1 = 0, cval2 = 0; 8311 int save_p = 0; 8312 8313 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p) 8314 /* Don't handle degenerate cases here; they should already 8315 have been handled anyway. */ 8316 && cval1 != 0 && cval2 != 0 8317 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2)) 8318 && TREE_TYPE (cval1) == TREE_TYPE (cval2) 8319 && INTEGRAL_TYPE_P (TREE_TYPE (cval1)) 8320 && TYPE_MAX_VALUE (TREE_TYPE (cval1)) 8321 && TYPE_MAX_VALUE (TREE_TYPE (cval2)) 8322 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)), 8323 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0)) 8324 { 8325 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1)); 8326 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1)); 8327 8328 /* We can't just pass T to eval_subst in case cval1 or cval2 8329 was the same as ARG1. */ 8330 8331 tree high_result 8332 = fold_build2 (code, type, 8333 eval_subst (arg0, cval1, maxval, 8334 cval2, minval), 8335 arg1); 8336 tree equal_result 8337 = fold_build2 (code, type, 8338 eval_subst (arg0, cval1, maxval, 8339 cval2, maxval), 8340 arg1); 8341 tree low_result 8342 = fold_build2 (code, type, 8343 eval_subst (arg0, cval1, minval, 8344 cval2, maxval), 8345 arg1); 8346 8347 /* All three of these results should be 0 or 1. Confirm they are. 8348 Then use those values to select the proper code to use. */ 8349 8350 if (TREE_CODE (high_result) == INTEGER_CST 8351 && TREE_CODE (equal_result) == INTEGER_CST 8352 && TREE_CODE (low_result) == INTEGER_CST) 8353 { 8354 /* Make a 3-bit mask with the high-order bit being the 8355 value for `>', the next for '=', and the low for '<'. */ 8356 switch ((integer_onep (high_result) * 4) 8357 + (integer_onep (equal_result) * 2) 8358 + integer_onep (low_result)) 8359 { 8360 case 0: 8361 /* Always false. */ 8362 return omit_one_operand (type, integer_zero_node, arg0); 8363 case 1: 8364 code = LT_EXPR; 8365 break; 8366 case 2: 8367 code = EQ_EXPR; 8368 break; 8369 case 3: 8370 code = LE_EXPR; 8371 break; 8372 case 4: 8373 code = GT_EXPR; 8374 break; 8375 case 5: 8376 code = NE_EXPR; 8377 break; 8378 case 6: 8379 code = GE_EXPR; 8380 break; 8381 case 7: 8382 /* Always true. */ 8383 return omit_one_operand (type, integer_one_node, arg0); 8384 } 8385 8386 if (save_p) 8387 return save_expr (build2 (code, type, cval1, cval2)); 8388 return fold_build2 (code, type, cval1, cval2); 8389 } 8390 } 8391 } 8392 8393 /* Fold a comparison of the address of COMPONENT_REFs with the same 8394 type and component to a comparison of the address of the base 8395 object. 
In short, &x->a OP &y->a to x OP y and 8396 &x->a OP &y.a to x OP &y */ 8397 if (TREE_CODE (arg0) == ADDR_EXPR 8398 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF 8399 && TREE_CODE (arg1) == ADDR_EXPR 8400 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF) 8401 { 8402 tree cref0 = TREE_OPERAND (arg0, 0); 8403 tree cref1 = TREE_OPERAND (arg1, 0); 8404 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1)) 8405 { 8406 tree op0 = TREE_OPERAND (cref0, 0); 8407 tree op1 = TREE_OPERAND (cref1, 0); 8408 return fold_build2 (code, type, 8409 build_fold_addr_expr (op0), 8410 build_fold_addr_expr (op1)); 8411 } 8412 } 8413 8414 /* We can fold X/C1 op C2 where C1 and C2 are integer constants 8415 into a single range test. */ 8416 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR 8417 || TREE_CODE (arg0) == EXACT_DIV_EXPR) 8418 && TREE_CODE (arg1) == INTEGER_CST 8419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8420 && !integer_zerop (TREE_OPERAND (arg0, 1)) 8421 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 8422 && !TREE_OVERFLOW (arg1)) 8423 { 8424 tem = fold_div_compare (code, type, arg0, arg1); 8425 if (tem != NULL_TREE) 8426 return tem; 8427 } 8428 8429 return NULL_TREE; 8430} 8431 8432 8433/* Subroutine of fold_binary. Optimize complex multiplications of the 8434 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The 8435 argument EXPR represents the expression "z" of type TYPE. */ 8436 8437static tree 8438fold_mult_zconjz (tree type, tree expr) 8439{ 8440 tree itype = TREE_TYPE (type); 8441 tree rpart, ipart, tem; 8442 8443 if (TREE_CODE (expr) == COMPLEX_EXPR) 8444 { 8445 rpart = TREE_OPERAND (expr, 0); 8446 ipart = TREE_OPERAND (expr, 1); 8447 } 8448 else if (TREE_CODE (expr) == COMPLEX_CST) 8449 { 8450 rpart = TREE_REALPART (expr); 8451 ipart = TREE_IMAGPART (expr); 8452 } 8453 else 8454 { 8455 expr = save_expr (expr); 8456 rpart = fold_build1 (REALPART_EXPR, itype, expr); 8457 ipart = fold_build1 (IMAGPART_EXPR, itype, expr); 8458 } 8459 8460 rpart = save_expr (rpart); 8461 ipart = save_expr (ipart); 8462 tem = fold_build2 (PLUS_EXPR, itype, 8463 fold_build2 (MULT_EXPR, itype, rpart, rpart), 8464 fold_build2 (MULT_EXPR, itype, ipart, ipart)); 8465 return fold_build2 (COMPLEX_EXPR, type, tem, 8466 fold_convert (itype, integer_zero_node)); 8467} 8468 8469 8470/* Fold a binary expression of code CODE and type TYPE with operands 8471 OP0 and OP1. Return the folded expression if folding is 8472 successful. Otherwise, return NULL_TREE. */ 8473 8474tree 8475fold_binary (enum tree_code code, tree type, tree op0, tree op1) 8476{ 8477 enum tree_code_class kind = TREE_CODE_CLASS (code); 8478 tree arg0, arg1, tem; 8479 tree t1 = NULL_TREE; 8480 bool strict_overflow_p; 8481 8482 gcc_assert (IS_EXPR_CODE_CLASS (kind) 8483 && TREE_CODE_LENGTH (code) == 2 8484 && op0 != NULL_TREE 8485 && op1 != NULL_TREE); 8486 8487 arg0 = op0; 8488 arg1 = op1; 8489 8490 /* Strip any conversions that don't change the mode. This is 8491 safe for every expression, except for a comparison expression 8492 because its signedness is derived from its operands. So, in 8493 the latter case, only strip conversions that don't change the 8494 signedness. 8495 8496 Note that this is done as an internal manipulation within the 8497 constant folder, in order to find the simplest representation 8498 of the arguments so that their form can be studied. In any 8499 cases, the appropriate type conversions should be put back in 8500 the tree that will get out of the constant folder. 
*/ 8501 8502 if (kind == tcc_comparison) 8503 { 8504 STRIP_SIGN_NOPS (arg0); 8505 STRIP_SIGN_NOPS (arg1); 8506 } 8507 else 8508 { 8509 STRIP_NOPS (arg0); 8510 STRIP_NOPS (arg1); 8511 } 8512 8513 /* Note that TREE_CONSTANT isn't enough: static var addresses are 8514 constant but we can't do arithmetic on them. */ 8515 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 8516 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 8517 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST) 8518 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)) 8519 { 8520 if (kind == tcc_binary) 8521 tem = const_binop (code, arg0, arg1, 0); 8522 else if (kind == tcc_comparison) 8523 tem = fold_relational_const (code, type, arg0, arg1); 8524 else 8525 tem = NULL_TREE; 8526 8527 if (tem != NULL_TREE) 8528 { 8529 if (TREE_TYPE (tem) != type) 8530 tem = fold_convert (type, tem); 8531 return tem; 8532 } 8533 } 8534 8535 /* If this is a commutative operation, and ARG0 is a constant, move it 8536 to ARG1 to reduce the number of tests below. */ 8537 if (commutative_tree_code (code) 8538 && tree_swap_operands_p (arg0, arg1, true)) 8539 return fold_build2 (code, type, op1, op0); 8540 8541 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand. 8542 8543 First check for cases where an arithmetic operation is applied to a 8544 compound, conditional, or comparison operation. Push the arithmetic 8545 operation inside the compound or conditional to see if any folding 8546 can then be done. Convert comparison to conditional for this purpose. 8547 The also optimizes non-constant cases that used to be done in 8548 expand_expr. 8549 8550 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR, 8551 one of the operands is a comparison and the other is a comparison, a 8552 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the 8553 code below would make the expression more complex. Change it to a 8554 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to 8555 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */ 8556 8557 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR 8558 || code == EQ_EXPR || code == NE_EXPR) 8559 && ((truth_value_p (TREE_CODE (arg0)) 8560 && (truth_value_p (TREE_CODE (arg1)) 8561 || (TREE_CODE (arg1) == BIT_AND_EXPR 8562 && integer_onep (TREE_OPERAND (arg1, 1))))) 8563 || (truth_value_p (TREE_CODE (arg1)) 8564 && (truth_value_p (TREE_CODE (arg0)) 8565 || (TREE_CODE (arg0) == BIT_AND_EXPR 8566 && integer_onep (TREE_OPERAND (arg0, 1))))))) 8567 { 8568 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR 8569 : code == BIT_IOR_EXPR ? 
TRUTH_OR_EXPR 8570 : TRUTH_XOR_EXPR, 8571 boolean_type_node, 8572 fold_convert (boolean_type_node, arg0), 8573 fold_convert (boolean_type_node, arg1)); 8574 8575 if (code == EQ_EXPR) 8576 tem = invert_truthvalue (tem); 8577 8578 return fold_convert (type, tem); 8579 } 8580 8581 if (TREE_CODE_CLASS (code) == tcc_binary 8582 || TREE_CODE_CLASS (code) == tcc_comparison) 8583 { 8584 if (TREE_CODE (arg0) == COMPOUND_EXPR) 8585 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 8586 fold_build2 (code, type, 8587 TREE_OPERAND (arg0, 1), op1)); 8588 if (TREE_CODE (arg1) == COMPOUND_EXPR 8589 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 8590 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), 8591 fold_build2 (code, type, 8592 op0, TREE_OPERAND (arg1, 1))); 8593 8594 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0)) 8595 { 8596 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1, 8597 arg0, arg1, 8598 /*cond_first_p=*/1); 8599 if (tem != NULL_TREE) 8600 return tem; 8601 } 8602 8603 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1)) 8604 { 8605 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1, 8606 arg1, arg0, 8607 /*cond_first_p=*/0); 8608 if (tem != NULL_TREE) 8609 return tem; 8610 } 8611 } 8612 8613 switch (code) 8614 { 8615 case PLUS_EXPR: 8616 /* A + (-B) -> A - B */ 8617 if (TREE_CODE (arg1) == NEGATE_EXPR) 8618 return fold_build2 (MINUS_EXPR, type, 8619 fold_convert (type, arg0), 8620 fold_convert (type, TREE_OPERAND (arg1, 0))); 8621 /* (-A) + B -> B - A */ 8622 if (TREE_CODE (arg0) == NEGATE_EXPR 8623 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)) 8624 return fold_build2 (MINUS_EXPR, type, 8625 fold_convert (type, arg1), 8626 fold_convert (type, TREE_OPERAND (arg0, 0))); 8627 /* Convert ~A + 1 to -A. */ 8628 if (INTEGRAL_TYPE_P (type) 8629 && TREE_CODE (arg0) == BIT_NOT_EXPR 8630 && integer_onep (arg1)) 8631 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)); 8632 8633 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the 8634 same or one. */ 8635 if ((TREE_CODE (arg0) == MULT_EXPR 8636 || TREE_CODE (arg1) == MULT_EXPR) 8637 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)) 8638 { 8639 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1); 8640 if (tem) 8641 return tem; 8642 } 8643 8644 if (! FLOAT_TYPE_P (type)) 8645 { 8646 if (integer_zerop (arg1)) 8647 return non_lvalue (fold_convert (type, arg0)); 8648 8649 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing 8650 with a constant, and the two constants have no bits in common, 8651 we should treat this as a BIT_IOR_EXPR since this may produce more 8652 simplifications. */ 8653 if (TREE_CODE (arg0) == BIT_AND_EXPR 8654 && TREE_CODE (arg1) == BIT_AND_EXPR 8655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8656 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 8657 && integer_zerop (const_binop (BIT_AND_EXPR, 8658 TREE_OPERAND (arg0, 1), 8659 TREE_OPERAND (arg1, 1), 0))) 8660 { 8661 code = BIT_IOR_EXPR; 8662 goto bit_ior; 8663 } 8664 8665 /* Reassociate (plus (plus (mult) (foo)) (mult)) as 8666 (plus (plus (mult) (mult)) (foo)) so that we can 8667 take advantage of the factoring cases below. 
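   Concretely, (a*b + c) + a*d is regrouped as (a*b + a*d) + c, so the
   two products become operands of the same PLUS_EXPR, where the
   (A1 * C1) + (A2 * C2) factoring (fold_plusminus_mult_expr) can catch
   them when the inner sum is folded again.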
*/ 8668 if (((TREE_CODE (arg0) == PLUS_EXPR 8669 || TREE_CODE (arg0) == MINUS_EXPR) 8670 && TREE_CODE (arg1) == MULT_EXPR) 8671 || ((TREE_CODE (arg1) == PLUS_EXPR 8672 || TREE_CODE (arg1) == MINUS_EXPR) 8673 && TREE_CODE (arg0) == MULT_EXPR)) 8674 { 8675 tree parg0, parg1, parg, marg; 8676 enum tree_code pcode; 8677 8678 if (TREE_CODE (arg1) == MULT_EXPR) 8679 parg = arg0, marg = arg1; 8680 else 8681 parg = arg1, marg = arg0; 8682 pcode = TREE_CODE (parg); 8683 parg0 = TREE_OPERAND (parg, 0); 8684 parg1 = TREE_OPERAND (parg, 1); 8685 STRIP_NOPS (parg0); 8686 STRIP_NOPS (parg1); 8687 8688 if (TREE_CODE (parg0) == MULT_EXPR 8689 && TREE_CODE (parg1) != MULT_EXPR) 8690 return fold_build2 (pcode, type, 8691 fold_build2 (PLUS_EXPR, type, 8692 fold_convert (type, parg0), 8693 fold_convert (type, marg)), 8694 fold_convert (type, parg1)); 8695 if (TREE_CODE (parg0) != MULT_EXPR 8696 && TREE_CODE (parg1) == MULT_EXPR) 8697 return fold_build2 (PLUS_EXPR, type, 8698 fold_convert (type, parg0), 8699 fold_build2 (pcode, type, 8700 fold_convert (type, marg), 8701 fold_convert (type, 8702 parg1))); 8703 } 8704 8705 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step 8706 of the array. Loop optimizer sometimes produce this type of 8707 expressions. */ 8708 if (TREE_CODE (arg0) == ADDR_EXPR) 8709 { 8710 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1); 8711 if (tem) 8712 return fold_convert (type, tem); 8713 } 8714 else if (TREE_CODE (arg1) == ADDR_EXPR) 8715 { 8716 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0); 8717 if (tem) 8718 return fold_convert (type, tem); 8719 } 8720 } 8721 else 8722 { 8723 /* See if ARG1 is zero and X + ARG1 reduces to X. */ 8724 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0)) 8725 return non_lvalue (fold_convert (type, arg0)); 8726 8727 /* Likewise if the operands are reversed. */ 8728 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) 8729 return non_lvalue (fold_convert (type, arg1)); 8730 8731 /* Convert X + -C into X - C. */ 8732 if (TREE_CODE (arg1) == REAL_CST 8733 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))) 8734 { 8735 tem = fold_negate_const (arg1, type); 8736 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math) 8737 return fold_build2 (MINUS_EXPR, type, 8738 fold_convert (type, arg0), 8739 fold_convert (type, tem)); 8740 } 8741 8742 if (flag_unsafe_math_optimizations 8743 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 8744 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 8745 && (tem = distribute_real_division (code, type, arg0, arg1))) 8746 return tem; 8747 8748 /* Convert x+x into x*2.0. */ 8749 if (operand_equal_p (arg0, arg1, 0) 8750 && SCALAR_FLOAT_TYPE_P (type)) 8751 return fold_build2 (MULT_EXPR, type, arg0, 8752 build_real (type, dconst2)); 8753 8754 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */ 8755 if (flag_unsafe_math_optimizations 8756 && TREE_CODE (arg1) == PLUS_EXPR 8757 && TREE_CODE (arg0) != MULT_EXPR) 8758 { 8759 tree tree10 = TREE_OPERAND (arg1, 0); 8760 tree tree11 = TREE_OPERAND (arg1, 1); 8761 if (TREE_CODE (tree11) == MULT_EXPR 8762 && TREE_CODE (tree10) == MULT_EXPR) 8763 { 8764 tree tree0; 8765 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10); 8766 return fold_build2 (PLUS_EXPR, type, tree0, tree11); 8767 } 8768 } 8769 /* Convert (b*c + d*e) + a into b*c + (d*e +a). 
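   Like the a + (b*c + d*e) case just above, this regrouping
   reassociates floating-point addition and is therefore only attempted
   under flag_unsafe_math_optimizations.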
*/ 8770 if (flag_unsafe_math_optimizations 8771 && TREE_CODE (arg0) == PLUS_EXPR 8772 && TREE_CODE (arg1) != MULT_EXPR) 8773 { 8774 tree tree00 = TREE_OPERAND (arg0, 0); 8775 tree tree01 = TREE_OPERAND (arg0, 1); 8776 if (TREE_CODE (tree01) == MULT_EXPR 8777 && TREE_CODE (tree00) == MULT_EXPR) 8778 { 8779 tree tree0; 8780 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1); 8781 return fold_build2 (PLUS_EXPR, type, tree00, tree0); 8782 } 8783 } 8784 } 8785 8786 bit_rotate: 8787 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A 8788 is a rotate of A by C1 bits. */ 8789 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A 8790 is a rotate of A by B bits. */ 8791 { 8792 enum tree_code code0, code1; 8793 code0 = TREE_CODE (arg0); 8794 code1 = TREE_CODE (arg1); 8795 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR) 8796 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR)) 8797 && operand_equal_p (TREE_OPERAND (arg0, 0), 8798 TREE_OPERAND (arg1, 0), 0) 8799 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 8800 { 8801 tree tree01, tree11; 8802 enum tree_code code01, code11; 8803 8804 tree01 = TREE_OPERAND (arg0, 1); 8805 tree11 = TREE_OPERAND (arg1, 1); 8806 STRIP_NOPS (tree01); 8807 STRIP_NOPS (tree11); 8808 code01 = TREE_CODE (tree01); 8809 code11 = TREE_CODE (tree11); 8810 if (code01 == INTEGER_CST 8811 && code11 == INTEGER_CST 8812 && TREE_INT_CST_HIGH (tree01) == 0 8813 && TREE_INT_CST_HIGH (tree11) == 0 8814 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11)) 8815 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))) 8816 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0), 8817 code0 == LSHIFT_EXPR ? tree01 : tree11); 8818 else if (code11 == MINUS_EXPR) 8819 { 8820 tree tree110, tree111; 8821 tree110 = TREE_OPERAND (tree11, 0); 8822 tree111 = TREE_OPERAND (tree11, 1); 8823 STRIP_NOPS (tree110); 8824 STRIP_NOPS (tree111); 8825 if (TREE_CODE (tree110) == INTEGER_CST 8826 && 0 == compare_tree_int (tree110, 8827 TYPE_PRECISION 8828 (TREE_TYPE (TREE_OPERAND 8829 (arg0, 0)))) 8830 && operand_equal_p (tree01, tree111, 0)) 8831 return build2 ((code0 == LSHIFT_EXPR 8832 ? LROTATE_EXPR 8833 : RROTATE_EXPR), 8834 type, TREE_OPERAND (arg0, 0), tree01); 8835 } 8836 else if (code01 == MINUS_EXPR) 8837 { 8838 tree tree010, tree011; 8839 tree010 = TREE_OPERAND (tree01, 0); 8840 tree011 = TREE_OPERAND (tree01, 1); 8841 STRIP_NOPS (tree010); 8842 STRIP_NOPS (tree011); 8843 if (TREE_CODE (tree010) == INTEGER_CST 8844 && 0 == compare_tree_int (tree010, 8845 TYPE_PRECISION 8846 (TREE_TYPE (TREE_OPERAND 8847 (arg0, 0)))) 8848 && operand_equal_p (tree11, tree011, 0)) 8849 return build2 ((code0 != LSHIFT_EXPR 8850 ? LROTATE_EXPR 8851 : RROTATE_EXPR), 8852 type, TREE_OPERAND (arg0, 0), tree11); 8853 } 8854 } 8855 } 8856 8857 associate: 8858 /* In most languages, can't associate operations on floats through 8859 parentheses. Rather than remember where the parentheses were, we 8860 don't associate floats at all, unless the user has specified 8861 -funsafe-math-optimizations. */ 8862 8863 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations) 8864 { 8865 tree var0, con0, lit0, minus_lit0; 8866 tree var1, con1, lit1, minus_lit1; 8867 bool ok = true; 8868 8869 /* Split both trees into variables, constants, and literals. Then 8870 associate each group together, the constants with literals, 8871 then the result with variables. 
This increases the chances of 8872 literals being recombined later and of generating relocatable 8873 expressions for the sum of a constant and literal. */ 8874 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0); 8875 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1, 8876 code == MINUS_EXPR); 8877 8878 /* With undefined overflow we can only associate constants 8879 with one variable. */ 8880 if ((POINTER_TYPE_P (type) 8881 || (INTEGRAL_TYPE_P (type) 8882 && !(TYPE_UNSIGNED (type) || flag_wrapv))) 8883 && var0 && var1) 8884 { 8885 tree tmp0 = var0; 8886 tree tmp1 = var1; 8887 8888 if (TREE_CODE (tmp0) == NEGATE_EXPR) 8889 tmp0 = TREE_OPERAND (tmp0, 0); 8890 if (TREE_CODE (tmp1) == NEGATE_EXPR) 8891 tmp1 = TREE_OPERAND (tmp1, 0); 8892 /* The only case we can still associate with two variables 8893 is if they are the same, modulo negation. */ 8894 if (!operand_equal_p (tmp0, tmp1, 0)) 8895 ok = false; 8896 } 8897 8898 /* Only do something if we found more than two objects. Otherwise, 8899 nothing has changed and we risk infinite recursion. */ 8900 if (ok 8901 && (2 < ((var0 != 0) + (var1 != 0) 8902 + (con0 != 0) + (con1 != 0) 8903 + (lit0 != 0) + (lit1 != 0) 8904 + (minus_lit0 != 0) + (minus_lit1 != 0)))) 8905 { 8906 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */ 8907 if (code == MINUS_EXPR) 8908 code = PLUS_EXPR; 8909 8910 var0 = associate_trees (var0, var1, code, type); 8911 con0 = associate_trees (con0, con1, code, type); 8912 lit0 = associate_trees (lit0, lit1, code, type); 8913 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type); 8914 8915 /* Preserve the MINUS_EXPR if the negative part of the literal is 8916 greater than the positive part. Otherwise, the multiplicative 8917 folding code (i.e extract_muldiv) may be fooled in case 8918 unsigned constants are subtracted, like in the following 8919 example: ((X*2 + 4) - 8U)/2. */ 8920 if (minus_lit0 && lit0) 8921 { 8922 if (TREE_CODE (lit0) == INTEGER_CST 8923 && TREE_CODE (minus_lit0) == INTEGER_CST 8924 && tree_int_cst_lt (lit0, minus_lit0)) 8925 { 8926 minus_lit0 = associate_trees (minus_lit0, lit0, 8927 MINUS_EXPR, type); 8928 lit0 = 0; 8929 } 8930 else 8931 { 8932 lit0 = associate_trees (lit0, minus_lit0, 8933 MINUS_EXPR, type); 8934 minus_lit0 = 0; 8935 } 8936 } 8937 if (minus_lit0) 8938 { 8939 if (con0 == 0) 8940 return fold_convert (type, 8941 associate_trees (var0, minus_lit0, 8942 MINUS_EXPR, type)); 8943 else 8944 { 8945 con0 = associate_trees (con0, minus_lit0, 8946 MINUS_EXPR, type); 8947 return fold_convert (type, 8948 associate_trees (var0, con0, 8949 PLUS_EXPR, type)); 8950 } 8951 } 8952 8953 con0 = associate_trees (con0, lit0, code, type); 8954 return fold_convert (type, associate_trees (var0, con0, 8955 code, type)); 8956 } 8957 } 8958 8959 return NULL_TREE; 8960 8961 case MINUS_EXPR: 8962 /* A - (-B) -> A + B */ 8963 if (TREE_CODE (arg1) == NEGATE_EXPR) 8964 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)); 8965 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ 8966 if (TREE_CODE (arg0) == NEGATE_EXPR 8967 && (FLOAT_TYPE_P (type) 8968 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)) 8969 && negate_expr_p (arg1) 8970 && reorder_operands_p (arg0, arg1)) 8971 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1), 8972 TREE_OPERAND (arg0, 0)); 8973 /* Convert -A - 1 to ~A. 
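   This is the two's complement identity ~A == -A - 1; the same
   identity justifies the companion -1 - A to ~A transformation below.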
*/ 8974 if (INTEGRAL_TYPE_P (type) 8975 && TREE_CODE (arg0) == NEGATE_EXPR 8976 && integer_onep (arg1)) 8977 return fold_build1 (BIT_NOT_EXPR, type, 8978 fold_convert (type, TREE_OPERAND (arg0, 0))); 8979 8980 /* Convert -1 - A to ~A. */ 8981 if (INTEGRAL_TYPE_P (type) 8982 && integer_all_onesp (arg0)) 8983 return fold_build1 (BIT_NOT_EXPR, type, arg1); 8984 8985 if (! FLOAT_TYPE_P (type)) 8986 { 8987 if (integer_zerop (arg0)) 8988 return negate_expr (fold_convert (type, arg1)); 8989 if (integer_zerop (arg1)) 8990 return non_lvalue (fold_convert (type, arg0)); 8991 8992 /* Fold A - (A & B) into ~B & A. */ 8993 if (!TREE_SIDE_EFFECTS (arg0) 8994 && TREE_CODE (arg1) == BIT_AND_EXPR) 8995 { 8996 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)) 8997 return fold_build2 (BIT_AND_EXPR, type, 8998 fold_build1 (BIT_NOT_EXPR, type, 8999 TREE_OPERAND (arg1, 0)), 9000 arg0); 9001 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9002 return fold_build2 (BIT_AND_EXPR, type, 9003 fold_build1 (BIT_NOT_EXPR, type, 9004 TREE_OPERAND (arg1, 1)), 9005 arg0); 9006 } 9007 9008 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is 9009 any power of 2 minus 1. */ 9010 if (TREE_CODE (arg0) == BIT_AND_EXPR 9011 && TREE_CODE (arg1) == BIT_AND_EXPR 9012 && operand_equal_p (TREE_OPERAND (arg0, 0), 9013 TREE_OPERAND (arg1, 0), 0)) 9014 { 9015 tree mask0 = TREE_OPERAND (arg0, 1); 9016 tree mask1 = TREE_OPERAND (arg1, 1); 9017 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0); 9018 9019 if (operand_equal_p (tem, mask1, 0)) 9020 { 9021 tem = fold_build2 (BIT_XOR_EXPR, type, 9022 TREE_OPERAND (arg0, 0), mask1); 9023 return fold_build2 (MINUS_EXPR, type, tem, mask1); 9024 } 9025 } 9026 } 9027 9028 /* See if ARG1 is zero and X - ARG1 reduces to X. */ 9029 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1)) 9030 return non_lvalue (fold_convert (type, arg0)); 9031 9032 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether 9033 ARG0 is zero and X + ARG0 reduces to X, since that would mean 9034 (-ARG1 + ARG0) reduces to -ARG1. */ 9035 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) 9036 return negate_expr (fold_convert (type, arg1)); 9037 9038 /* Fold &x - &x. This can happen from &x.foo - &x. 9039 This is unsafe for certain floats even in non-IEEE formats. 9040 In IEEE, it is unsafe because it does wrong for NaNs. 9041 Also note that operand_equal_p is always false if an operand 9042 is volatile. */ 9043 9044 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations) 9045 && operand_equal_p (arg0, arg1, 0)) 9046 return fold_convert (type, integer_zero_node); 9047 9048 /* A - B -> A + (-B) if B is easily negatable. */ 9049 if (negate_expr_p (arg1) 9050 && ((FLOAT_TYPE_P (type) 9051 /* Avoid this transformation if B is a positive REAL_CST. */ 9052 && (TREE_CODE (arg1) != REAL_CST 9053 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) 9054 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))) 9055 return fold_build2 (PLUS_EXPR, type, 9056 fold_convert (type, arg0), 9057 fold_convert (type, negate_expr (arg1))); 9058 9059 /* Try folding difference of addresses. */ 9060 { 9061 HOST_WIDE_INT diff; 9062 9063 if ((TREE_CODE (arg0) == ADDR_EXPR 9064 || TREE_CODE (arg1) == ADDR_EXPR) 9065 && ptr_difference_const (arg0, arg1, &diff)) 9066 return build_int_cst_type (type, diff); 9067 } 9068 9069 /* Fold &a[i] - &a[j] to i-j. 
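     More precisely, when both operands take the address of an element of
     the same array, the difference is rebuilt below as (i - j) scaled by
     the array's element size, so the array base itself drops out.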
*/ 9070 if (TREE_CODE (arg0) == ADDR_EXPR 9071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF 9072 && TREE_CODE (arg1) == ADDR_EXPR 9073 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF) 9074 { 9075 tree aref0 = TREE_OPERAND (arg0, 0); 9076 tree aref1 = TREE_OPERAND (arg1, 0); 9077 if (operand_equal_p (TREE_OPERAND (aref0, 0), 9078 TREE_OPERAND (aref1, 0), 0)) 9079 { 9080 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1)); 9081 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1)); 9082 tree esz = array_ref_element_size (aref0); 9083 tree diff = build2 (MINUS_EXPR, type, op0, op1); 9084 return fold_build2 (MULT_EXPR, type, diff, 9085 fold_convert (type, esz)); 9086 9087 } 9088 } 9089 9090 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step 9091 of the array. Loop optimizer sometimes produce this type of 9092 expressions. */ 9093 if (TREE_CODE (arg0) == ADDR_EXPR) 9094 { 9095 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1); 9096 if (tem) 9097 return fold_convert (type, tem); 9098 } 9099 9100 if (flag_unsafe_math_optimizations 9101 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 9102 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 9103 && (tem = distribute_real_division (code, type, arg0, arg1))) 9104 return tem; 9105 9106 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the 9107 same or one. */ 9108 if ((TREE_CODE (arg0) == MULT_EXPR 9109 || TREE_CODE (arg1) == MULT_EXPR) 9110 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)) 9111 { 9112 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1); 9113 if (tem) 9114 return tem; 9115 } 9116 9117 goto associate; 9118 9119 case MULT_EXPR: 9120 /* (-A) * (-B) -> A * B */ 9121 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 9122 return fold_build2 (MULT_EXPR, type, 9123 fold_convert (type, TREE_OPERAND (arg0, 0)), 9124 fold_convert (type, negate_expr (arg1))); 9125 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 9126 return fold_build2 (MULT_EXPR, type, 9127 fold_convert (type, negate_expr (arg0)), 9128 fold_convert (type, TREE_OPERAND (arg1, 0))); 9129 9130 if (! FLOAT_TYPE_P (type)) 9131 { 9132 if (integer_zerop (arg1)) 9133 return omit_one_operand (type, arg1, arg0); 9134 if (integer_onep (arg1)) 9135 return non_lvalue (fold_convert (type, arg0)); 9136 /* Transform x * -1 into -x. */ 9137 if (integer_all_onesp (arg1)) 9138 return fold_convert (type, negate_expr (arg0)); 9139 9140 /* (a * (1 << b)) is (a << b) */ 9141 if (TREE_CODE (arg1) == LSHIFT_EXPR 9142 && integer_onep (TREE_OPERAND (arg1, 0))) 9143 return fold_build2 (LSHIFT_EXPR, type, arg0, 9144 TREE_OPERAND (arg1, 1)); 9145 if (TREE_CODE (arg0) == LSHIFT_EXPR 9146 && integer_onep (TREE_OPERAND (arg0, 0))) 9147 return fold_build2 (LSHIFT_EXPR, type, arg1, 9148 TREE_OPERAND (arg0, 1)); 9149 9150 strict_overflow_p = false; 9151 if (TREE_CODE (arg1) == INTEGER_CST 9152 && 0 != (tem = extract_muldiv (op0, 9153 fold_convert (type, arg1), 9154 code, NULL_TREE, 9155 &strict_overflow_p))) 9156 { 9157 if (strict_overflow_p) 9158 fold_overflow_warning (("assuming signed overflow does not " 9159 "occur when simplifying " 9160 "multiplication"), 9161 WARN_STRICT_OVERFLOW_MISC); 9162 return fold_convert (type, tem); 9163 } 9164 9165 /* Optimize z * conj(z) for integer complex numbers. 
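     For z = a + b*i we have conj(z) = a - b*i, so z * conj(z) is
     a*a + b*b with a zero imaginary part, which is the form the
     fold_mult_zconjz calls below are expected to build.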
*/ 9166 if (TREE_CODE (arg0) == CONJ_EXPR 9167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9168 return fold_mult_zconjz (type, arg1); 9169 if (TREE_CODE (arg1) == CONJ_EXPR 9170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9171 return fold_mult_zconjz (type, arg0); 9172 } 9173 else 9174 { 9175 /* Maybe fold x * 0 to 0. The expressions aren't the same 9176 when x is NaN, since x * 0 is also NaN. Nor are they the 9177 same in modes with signed zeros, since multiplying a 9178 negative value by 0 gives -0, not +0. */ 9179 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 9180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 9181 && real_zerop (arg1)) 9182 return omit_one_operand (type, arg1, arg0); 9183 /* In IEEE floating point, x*1 is not equivalent to x for snans. */ 9184 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 9185 && real_onep (arg1)) 9186 return non_lvalue (fold_convert (type, arg0)); 9187 9188 /* Transform x * -1.0 into -x. */ 9189 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 9190 && real_minus_onep (arg1)) 9191 return fold_convert (type, negate_expr (arg0)); 9192 9193 /* Convert (C1/X)*C2 into (C1*C2)/X. */ 9194 if (flag_unsafe_math_optimizations 9195 && TREE_CODE (arg0) == RDIV_EXPR 9196 && TREE_CODE (arg1) == REAL_CST 9197 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST) 9198 { 9199 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0), 9200 arg1, 0); 9201 if (tem) 9202 return fold_build2 (RDIV_EXPR, type, tem, 9203 TREE_OPERAND (arg0, 1)); 9204 } 9205 9206 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */ 9207 if (operand_equal_p (arg0, arg1, 0)) 9208 { 9209 tree tem = fold_strip_sign_ops (arg0); 9210 if (tem != NULL_TREE) 9211 { 9212 tem = fold_convert (type, tem); 9213 return fold_build2 (MULT_EXPR, type, tem, tem); 9214 } 9215 } 9216 9217 /* Optimize z * conj(z) for floating point complex numbers. 9218 Guarded by flag_unsafe_math_optimizations as non-finite 9219 imaginary components don't produce scalar results. */ 9220 if (flag_unsafe_math_optimizations 9221 && TREE_CODE (arg0) == CONJ_EXPR 9222 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9223 return fold_mult_zconjz (type, arg1); 9224 if (flag_unsafe_math_optimizations 9225 && TREE_CODE (arg1) == CONJ_EXPR 9226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9227 return fold_mult_zconjz (type, arg0); 9228 9229 if (flag_unsafe_math_optimizations) 9230 { 9231 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 9232 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 9233 9234 /* Optimizations of root(...)*root(...). */ 9235 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0)) 9236 { 9237 tree rootfn, arg, arglist; 9238 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9239 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 9240 9241 /* Optimize sqrt(x)*sqrt(x) as x. */ 9242 if (BUILTIN_SQRT_P (fcode0) 9243 && operand_equal_p (arg00, arg10, 0) 9244 && ! HONOR_SNANS (TYPE_MODE (type))) 9245 return arg00; 9246 9247 /* Optimize root(x)*root(y) as root(x*y). */ 9248 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9249 arg = fold_build2 (MULT_EXPR, type, arg00, arg10); 9250 arglist = build_tree_list (NULL_TREE, arg); 9251 return build_function_call_expr (rootfn, arglist); 9252 } 9253 9254 /* Optimize expN(x)*expN(y) as expN(x+y). 
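     This relies on the identity e**x * e**y == e**(x+y) (and likewise for
     variants such as exp2 and exp10); the rewrite need not round
     identically, which is why this whole block sits under
     flag_unsafe_math_optimizations.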
*/ 9255 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) 9256 { 9257 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9258 tree arg = fold_build2 (PLUS_EXPR, type, 9259 TREE_VALUE (TREE_OPERAND (arg0, 1)), 9260 TREE_VALUE (TREE_OPERAND (arg1, 1))); 9261 tree arglist = build_tree_list (NULL_TREE, arg); 9262 return build_function_call_expr (expfn, arglist); 9263 } 9264 9265 /* Optimizations of pow(...)*pow(...). */ 9266 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW) 9267 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF) 9268 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL)) 9269 { 9270 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9271 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 9272 1))); 9273 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 9274 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 9275 1))); 9276 9277 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */ 9278 if (operand_equal_p (arg01, arg11, 0)) 9279 { 9280 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9281 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10); 9282 tree arglist = tree_cons (NULL_TREE, arg, 9283 build_tree_list (NULL_TREE, 9284 arg01)); 9285 return build_function_call_expr (powfn, arglist); 9286 } 9287 9288 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */ 9289 if (operand_equal_p (arg00, arg10, 0)) 9290 { 9291 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9292 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11); 9293 tree arglist = tree_cons (NULL_TREE, arg00, 9294 build_tree_list (NULL_TREE, 9295 arg)); 9296 return build_function_call_expr (powfn, arglist); 9297 } 9298 } 9299 9300 /* Optimize tan(x)*cos(x) as sin(x). */ 9301 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS) 9302 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF) 9303 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL) 9304 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN) 9305 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF) 9306 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL)) 9307 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), 9308 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) 9309 { 9310 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); 9311 9312 if (sinfn != NULL_TREE) 9313 return build_function_call_expr (sinfn, 9314 TREE_OPERAND (arg0, 1)); 9315 } 9316 9317 /* Optimize x*pow(x,c) as pow(x,c+1). */ 9318 if (fcode1 == BUILT_IN_POW 9319 || fcode1 == BUILT_IN_POWF 9320 || fcode1 == BUILT_IN_POWL) 9321 { 9322 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 9323 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 9324 1))); 9325 if (TREE_CODE (arg11) == REAL_CST 9326 && ! TREE_CONSTANT_OVERFLOW (arg11) 9327 && operand_equal_p (arg0, arg10, 0)) 9328 { 9329 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); 9330 REAL_VALUE_TYPE c; 9331 tree arg, arglist; 9332 9333 c = TREE_REAL_CST (arg11); 9334 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 9335 arg = build_real (type, c); 9336 arglist = build_tree_list (NULL_TREE, arg); 9337 arglist = tree_cons (NULL_TREE, arg0, arglist); 9338 return build_function_call_expr (powfn, arglist); 9339 } 9340 } 9341 9342 /* Optimize pow(x,c)*x as pow(x,c+1). */ 9343 if (fcode0 == BUILT_IN_POW 9344 || fcode0 == BUILT_IN_POWF 9345 || fcode0 == BUILT_IN_POWL) 9346 { 9347 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9348 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 9349 1))); 9350 if (TREE_CODE (arg01) == REAL_CST 9351 && ! 
TREE_CONSTANT_OVERFLOW (arg01) 9352 && operand_equal_p (arg1, arg00, 0)) 9353 { 9354 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9355 REAL_VALUE_TYPE c; 9356 tree arg, arglist; 9357 9358 c = TREE_REAL_CST (arg01); 9359 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 9360 arg = build_real (type, c); 9361 arglist = build_tree_list (NULL_TREE, arg); 9362 arglist = tree_cons (NULL_TREE, arg1, arglist); 9363 return build_function_call_expr (powfn, arglist); 9364 } 9365 } 9366 9367 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */ 9368 if (! optimize_size 9369 && operand_equal_p (arg0, arg1, 0)) 9370 { 9371 tree powfn = mathfn_built_in (type, BUILT_IN_POW); 9372 9373 if (powfn) 9374 { 9375 tree arg = build_real (type, dconst2); 9376 tree arglist = build_tree_list (NULL_TREE, arg); 9377 arglist = tree_cons (NULL_TREE, arg0, arglist); 9378 return build_function_call_expr (powfn, arglist); 9379 } 9380 } 9381 } 9382 } 9383 goto associate; 9384 9385 case BIT_IOR_EXPR: 9386 bit_ior: 9387 if (integer_all_onesp (arg1)) 9388 return omit_one_operand (type, arg1, arg0); 9389 if (integer_zerop (arg1)) 9390 return non_lvalue (fold_convert (type, arg0)); 9391 if (operand_equal_p (arg0, arg1, 0)) 9392 return non_lvalue (fold_convert (type, arg0)); 9393 9394 /* ~X | X is -1. */ 9395 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9396 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 9397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9398 { 9399 t1 = build_int_cst (type, -1); 9400 t1 = force_fit_type (t1, 0, false, false); 9401 return omit_one_operand (type, t1, arg1); 9402 } 9403 9404 /* X | ~X is -1. */ 9405 if (TREE_CODE (arg1) == BIT_NOT_EXPR 9406 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 9407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9408 { 9409 t1 = build_int_cst (type, -1); 9410 t1 = force_fit_type (t1, 0, false, false); 9411 return omit_one_operand (type, t1, arg0); 9412 } 9413 9414 /* Canonicalize (X & C1) | C2. */ 9415 if (TREE_CODE (arg0) == BIT_AND_EXPR 9416 && TREE_CODE (arg1) == INTEGER_CST 9417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 9418 { 9419 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi; 9420 int width = TYPE_PRECISION (type); 9421 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)); 9422 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); 9423 hi2 = TREE_INT_CST_HIGH (arg1); 9424 lo2 = TREE_INT_CST_LOW (arg1); 9425 9426 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */ 9427 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1) 9428 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); 9429 9430 if (width > HOST_BITS_PER_WIDE_INT) 9431 { 9432 mhi = (unsigned HOST_WIDE_INT) -1 9433 >> (2 * HOST_BITS_PER_WIDE_INT - width); 9434 mlo = -1; 9435 } 9436 else 9437 { 9438 mhi = 0; 9439 mlo = (unsigned HOST_WIDE_INT) -1 9440 >> (HOST_BITS_PER_WIDE_INT - width); 9441 } 9442 9443 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ 9444 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0) 9445 return fold_build2 (BIT_IOR_EXPR, type, 9446 TREE_OPERAND (arg0, 0), arg1); 9447 9448 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */ 9449 hi1 &= mhi; 9450 lo1 &= mlo; 9451 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1) 9452 return fold_build2 (BIT_IOR_EXPR, type, 9453 fold_build2 (BIT_AND_EXPR, type, 9454 TREE_OPERAND (arg0, 0), 9455 build_int_cst_wide (type, 9456 lo1 & ~lo2, 9457 hi1 & ~hi2)), 9458 arg1); 9459 } 9460 9461 /* (X & Y) | Y is (X, Y). 
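     Every bit that is set in Y is set in the result, and every bit that
     is clear in Y is cleared by the AND, so the value is just Y; the
     (X, Y) notation means X is kept only for its side effects
     (omit_one_operand below).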
*/ 9462 if (TREE_CODE (arg0) == BIT_AND_EXPR 9463 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 9464 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); 9465 /* (X & Y) | X is (Y, X). */ 9466 if (TREE_CODE (arg0) == BIT_AND_EXPR 9467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 9468 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 9469 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1)); 9470 /* X | (X & Y) is (Y, X). */ 9471 if (TREE_CODE (arg1) == BIT_AND_EXPR 9472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 9473 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 9474 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1)); 9475 /* X | (Y & X) is (Y, X). */ 9476 if (TREE_CODE (arg1) == BIT_AND_EXPR 9477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 9478 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 9479 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0)); 9480 9481 t1 = distribute_bit_expr (code, type, arg0, arg1); 9482 if (t1 != NULL_TREE) 9483 return t1; 9484 9485 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))). 9486 9487 This results in more efficient code for machines without a NAND 9488 instruction. Combine will canonicalize to the first form 9489 which will allow use of NAND instructions provided by the 9490 backend if they exist. */ 9491 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9492 && TREE_CODE (arg1) == BIT_NOT_EXPR) 9493 { 9494 return fold_build1 (BIT_NOT_EXPR, type, 9495 build2 (BIT_AND_EXPR, type, 9496 TREE_OPERAND (arg0, 0), 9497 TREE_OPERAND (arg1, 0))); 9498 } 9499 9500 /* See if this can be simplified into a rotate first. If that 9501 is unsuccessful continue in the association code. */ 9502 goto bit_rotate; 9503 9504 case BIT_XOR_EXPR: 9505 if (integer_zerop (arg1)) 9506 return non_lvalue (fold_convert (type, arg0)); 9507 if (integer_all_onesp (arg1)) 9508 return fold_build1 (BIT_NOT_EXPR, type, arg0); 9509 if (operand_equal_p (arg0, arg1, 0)) 9510 return omit_one_operand (type, integer_zero_node, arg0); 9511 9512 /* ~X ^ X is -1. */ 9513 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9514 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 9515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9516 { 9517 t1 = build_int_cst (type, -1); 9518 t1 = force_fit_type (t1, 0, false, false); 9519 return omit_one_operand (type, t1, arg1); 9520 } 9521 9522 /* X ^ ~X is -1. */ 9523 if (TREE_CODE (arg1) == BIT_NOT_EXPR 9524 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 9525 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9526 { 9527 t1 = build_int_cst (type, -1); 9528 t1 = force_fit_type (t1, 0, false, false); 9529 return omit_one_operand (type, t1, arg0); 9530 } 9531 9532 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing 9533 with a constant, and the two constants have no bits in common, 9534 we should treat this as a BIT_IOR_EXPR since this may produce more 9535 simplifications. 
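     For example, (X & 4) ^ (Y & 3) can be treated as (X & 4) | (Y & 3):
     the constants 4 and 3 have no bits in common, so no bit position can
     ever see two one bits.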
*/ 9536 if (TREE_CODE (arg0) == BIT_AND_EXPR 9537 && TREE_CODE (arg1) == BIT_AND_EXPR 9538 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9539 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 9540 && integer_zerop (const_binop (BIT_AND_EXPR, 9541 TREE_OPERAND (arg0, 1), 9542 TREE_OPERAND (arg1, 1), 0))) 9543 { 9544 code = BIT_IOR_EXPR; 9545 goto bit_ior; 9546 } 9547 9548 /* (X | Y) ^ X -> Y & ~ X*/ 9549 if (TREE_CODE (arg0) == BIT_IOR_EXPR 9550 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9551 { 9552 tree t2 = TREE_OPERAND (arg0, 1); 9553 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), 9554 arg1); 9555 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), 9556 fold_convert (type, t1)); 9557 return t1; 9558 } 9559 9560 /* (Y | X) ^ X -> Y & ~ X*/ 9561 if (TREE_CODE (arg0) == BIT_IOR_EXPR 9562 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 9563 { 9564 tree t2 = TREE_OPERAND (arg0, 0); 9565 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), 9566 arg1); 9567 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), 9568 fold_convert (type, t1)); 9569 return t1; 9570 } 9571 9572 /* X ^ (X | Y) -> Y & ~ X*/ 9573 if (TREE_CODE (arg1) == BIT_IOR_EXPR 9574 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) 9575 { 9576 tree t2 = TREE_OPERAND (arg1, 1); 9577 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), 9578 arg0); 9579 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), 9580 fold_convert (type, t1)); 9581 return t1; 9582 } 9583 9584 /* X ^ (Y | X) -> Y & ~ X*/ 9585 if (TREE_CODE (arg1) == BIT_IOR_EXPR 9586 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) 9587 { 9588 tree t2 = TREE_OPERAND (arg1, 0); 9589 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), 9590 arg0); 9591 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), 9592 fold_convert (type, t1)); 9593 return t1; 9594 } 9595 9596 /* Convert ~X ^ ~Y to X ^ Y. */ 9597 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9598 && TREE_CODE (arg1) == BIT_NOT_EXPR) 9599 return fold_build2 (code, type, 9600 fold_convert (type, TREE_OPERAND (arg0, 0)), 9601 fold_convert (type, TREE_OPERAND (arg1, 0))); 9602 9603 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */ 9604 if (TREE_CODE (arg0) == BIT_AND_EXPR 9605 && integer_onep (TREE_OPERAND (arg0, 1)) 9606 && integer_onep (arg1)) 9607 return fold_build2 (EQ_EXPR, type, arg0, 9608 build_int_cst (TREE_TYPE (arg0), 0)); 9609 9610 /* Fold (X & Y) ^ Y as ~X & Y. */ 9611 if (TREE_CODE (arg0) == BIT_AND_EXPR 9612 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 9613 { 9614 tem = fold_convert (type, TREE_OPERAND (arg0, 0)); 9615 return fold_build2 (BIT_AND_EXPR, type, 9616 fold_build1 (BIT_NOT_EXPR, type, tem), 9617 fold_convert (type, arg1)); 9618 } 9619 /* Fold (X & Y) ^ X as ~Y & X. */ 9620 if (TREE_CODE (arg0) == BIT_AND_EXPR 9621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 9622 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 9623 { 9624 tem = fold_convert (type, TREE_OPERAND (arg0, 1)); 9625 return fold_build2 (BIT_AND_EXPR, type, 9626 fold_build1 (BIT_NOT_EXPR, type, tem), 9627 fold_convert (type, arg1)); 9628 } 9629 /* Fold X ^ (X & Y) as X & ~Y. */ 9630 if (TREE_CODE (arg1) == BIT_AND_EXPR 9631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9632 { 9633 tem = fold_convert (type, TREE_OPERAND (arg1, 1)); 9634 return fold_build2 (BIT_AND_EXPR, type, 9635 fold_convert (type, arg0), 9636 fold_build1 (BIT_NOT_EXPR, type, tem)); 9637 } 9638 /* Fold X ^ (Y & X) as ~Y & X. 
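     Where a bit of X is 0 both forms give 0; where it is 1 the left-hand
     side is 1 ^ Y, i.e. the complement of Y, which matches ~Y & X.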
*/ 9639 if (TREE_CODE (arg1) == BIT_AND_EXPR 9640 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 9641 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 9642 { 9643 tem = fold_convert (type, TREE_OPERAND (arg1, 0)); 9644 return fold_build2 (BIT_AND_EXPR, type, 9645 fold_build1 (BIT_NOT_EXPR, type, tem), 9646 fold_convert (type, arg0)); 9647 } 9648 9649 /* See if this can be simplified into a rotate first. If that 9650 is unsuccessful continue in the association code. */ 9651 goto bit_rotate; 9652 9653 case BIT_AND_EXPR: 9654 if (integer_all_onesp (arg1)) 9655 return non_lvalue (fold_convert (type, arg0)); 9656 if (integer_zerop (arg1)) 9657 return omit_one_operand (type, arg1, arg0); 9658 if (operand_equal_p (arg0, arg1, 0)) 9659 return non_lvalue (fold_convert (type, arg0)); 9660 9661 /* ~X & X is always zero. */ 9662 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9663 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 9664 return omit_one_operand (type, integer_zero_node, arg1); 9665 9666 /* X & ~X is always zero. */ 9667 if (TREE_CODE (arg1) == BIT_NOT_EXPR 9668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9669 return omit_one_operand (type, integer_zero_node, arg0); 9670 9671 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */ 9672 if (TREE_CODE (arg0) == BIT_IOR_EXPR 9673 && TREE_CODE (arg1) == INTEGER_CST 9674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 9675 return fold_build2 (BIT_IOR_EXPR, type, 9676 fold_build2 (BIT_AND_EXPR, type, 9677 TREE_OPERAND (arg0, 0), arg1), 9678 fold_build2 (BIT_AND_EXPR, type, 9679 TREE_OPERAND (arg0, 1), arg1)); 9680 9681 /* (X | Y) & Y is (X, Y). */ 9682 if (TREE_CODE (arg0) == BIT_IOR_EXPR 9683 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 9684 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); 9685 /* (X | Y) & X is (Y, X). */ 9686 if (TREE_CODE (arg0) == BIT_IOR_EXPR 9687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 9688 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 9689 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1)); 9690 /* X & (X | Y) is (Y, X). */ 9691 if (TREE_CODE (arg1) == BIT_IOR_EXPR 9692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 9693 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 9694 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1)); 9695 /* X & (Y | X) is (Y, X). */ 9696 if (TREE_CODE (arg1) == BIT_IOR_EXPR 9697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 9698 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 9699 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0)); 9700 9701 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */ 9702 if (TREE_CODE (arg0) == BIT_XOR_EXPR 9703 && integer_onep (TREE_OPERAND (arg0, 1)) 9704 && integer_onep (arg1)) 9705 { 9706 tem = TREE_OPERAND (arg0, 0); 9707 return fold_build2 (EQ_EXPR, type, 9708 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem, 9709 build_int_cst (TREE_TYPE (tem), 1)), 9710 build_int_cst (TREE_TYPE (tem), 0)); 9711 } 9712 /* Fold ~X & 1 as (X & 1) == 0. */ 9713 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9714 && integer_onep (arg1)) 9715 { 9716 tem = TREE_OPERAND (arg0, 0); 9717 return fold_build2 (EQ_EXPR, type, 9718 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem, 9719 build_int_cst (TREE_TYPE (tem), 1)), 9720 build_int_cst (TREE_TYPE (tem), 0)); 9721 } 9722 9723 /* Fold (X ^ Y) & Y as ~X & Y. 
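     Bits where Y is 0 vanish on both sides; where Y is 1 the left-hand
     side is X ^ 1, i.e. ~X, so the expression reduces to ~X & Y.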
*/ 9724 if (TREE_CODE (arg0) == BIT_XOR_EXPR 9725 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 9726 { 9727 tem = fold_convert (type, TREE_OPERAND (arg0, 0)); 9728 return fold_build2 (BIT_AND_EXPR, type, 9729 fold_build1 (BIT_NOT_EXPR, type, tem), 9730 fold_convert (type, arg1)); 9731 } 9732 /* Fold (X ^ Y) & X as ~Y & X. */ 9733 if (TREE_CODE (arg0) == BIT_XOR_EXPR 9734 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 9735 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 9736 { 9737 tem = fold_convert (type, TREE_OPERAND (arg0, 1)); 9738 return fold_build2 (BIT_AND_EXPR, type, 9739 fold_build1 (BIT_NOT_EXPR, type, tem), 9740 fold_convert (type, arg1)); 9741 } 9742 /* Fold X & (X ^ Y) as X & ~Y. */ 9743 if (TREE_CODE (arg1) == BIT_XOR_EXPR 9744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 9745 { 9746 tem = fold_convert (type, TREE_OPERAND (arg1, 1)); 9747 return fold_build2 (BIT_AND_EXPR, type, 9748 fold_convert (type, arg0), 9749 fold_build1 (BIT_NOT_EXPR, type, tem)); 9750 } 9751 /* Fold X & (Y ^ X) as ~Y & X. */ 9752 if (TREE_CODE (arg1) == BIT_XOR_EXPR 9753 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 9754 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 9755 { 9756 tem = fold_convert (type, TREE_OPERAND (arg1, 0)); 9757 return fold_build2 (BIT_AND_EXPR, type, 9758 fold_build1 (BIT_NOT_EXPR, type, tem), 9759 fold_convert (type, arg0)); 9760 } 9761 9762 t1 = distribute_bit_expr (code, type, arg0, arg1); 9763 if (t1 != NULL_TREE) 9764 return t1; 9765 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */ 9766 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR 9767 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 9768 { 9769 unsigned int prec 9770 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))); 9771 9772 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT 9773 && (~TREE_INT_CST_LOW (arg1) 9774 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0) 9775 return fold_convert (type, TREE_OPERAND (arg0, 0)); 9776 } 9777 9778 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))). 9779 9780 This results in more efficient code for machines without a NOR 9781 instruction. Combine will canonicalize to the first form 9782 which will allow use of NOR instructions provided by the 9783 backend if they exist. */ 9784 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9785 && TREE_CODE (arg1) == BIT_NOT_EXPR) 9786 { 9787 return fold_build1 (BIT_NOT_EXPR, type, 9788 build2 (BIT_IOR_EXPR, type, 9789 TREE_OPERAND (arg0, 0), 9790 TREE_OPERAND (arg1, 0))); 9791 } 9792 9793 goto associate; 9794 9795 case RDIV_EXPR: 9796 /* Don't touch a floating-point divide by zero unless the mode 9797 of the constant can represent infinity. */ 9798 if (TREE_CODE (arg1) == REAL_CST 9799 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))) 9800 && real_zerop (arg1)) 9801 return NULL_TREE; 9802 9803 /* Optimize A / A to 1.0 if we don't care about 9804 NaNs or Infinities. Skip the transformation 9805 for non-real operands. */ 9806 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0)) 9807 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 9808 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0))) 9809 && operand_equal_p (arg0, arg1, 0)) 9810 { 9811 tree r = build_real (TREE_TYPE (arg0), dconst1); 9812 9813 return omit_two_operands (type, r, arg0, arg1); 9814 } 9815 9816 /* The complex version of the above A / A optimization. 
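     For a complex value z, z / z folds to 1 + 0i provided NaNs and
     infinities can be ignored, so the checks below are made on the
     element type of the complex operand.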
*/ 9817 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 9818 && operand_equal_p (arg0, arg1, 0)) 9819 { 9820 tree elem_type = TREE_TYPE (TREE_TYPE (arg0)); 9821 if (! HONOR_NANS (TYPE_MODE (elem_type)) 9822 && ! HONOR_INFINITIES (TYPE_MODE (elem_type))) 9823 { 9824 tree r = build_real (elem_type, dconst1); 9825 /* omit_two_operands will call fold_convert for us. */ 9826 return omit_two_operands (type, r, arg0, arg1); 9827 } 9828 } 9829 9830 /* (-A) / (-B) -> A / B */ 9831 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 9832 return fold_build2 (RDIV_EXPR, type, 9833 TREE_OPERAND (arg0, 0), 9834 negate_expr (arg1)); 9835 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 9836 return fold_build2 (RDIV_EXPR, type, 9837 negate_expr (arg0), 9838 TREE_OPERAND (arg1, 0)); 9839 9840 /* In IEEE floating point, x/1 is not equivalent to x for snans. */ 9841 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 9842 && real_onep (arg1)) 9843 return non_lvalue (fold_convert (type, arg0)); 9844 9845 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */ 9846 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 9847 && real_minus_onep (arg1)) 9848 return non_lvalue (fold_convert (type, negate_expr (arg0))); 9849 9850 /* If ARG1 is a constant, we can convert this to a multiply by the 9851 reciprocal. This does not have the same rounding properties, 9852 so only do this if -funsafe-math-optimizations. We can actually 9853 always safely do it if ARG1 is a power of two, but it's hard to 9854 tell if it is or not in a portable manner. */ 9855 if (TREE_CODE (arg1) == REAL_CST) 9856 { 9857 if (flag_unsafe_math_optimizations 9858 && 0 != (tem = const_binop (code, build_real (type, dconst1), 9859 arg1, 0))) 9860 return fold_build2 (MULT_EXPR, type, arg0, tem); 9861 /* Find the reciprocal if optimizing and the result is exact. */ 9862 if (optimize) 9863 { 9864 REAL_VALUE_TYPE r; 9865 r = TREE_REAL_CST (arg1); 9866 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r)) 9867 { 9868 tem = build_real (type, r); 9869 return fold_build2 (MULT_EXPR, type, 9870 fold_convert (type, arg0), tem); 9871 } 9872 } 9873 } 9874 /* Convert A/B/C to A/(B*C). */ 9875 if (flag_unsafe_math_optimizations 9876 && TREE_CODE (arg0) == RDIV_EXPR) 9877 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0), 9878 fold_build2 (MULT_EXPR, type, 9879 TREE_OPERAND (arg0, 1), arg1)); 9880 9881 /* Convert A/(B/C) to (A/B)*C. */ 9882 if (flag_unsafe_math_optimizations 9883 && TREE_CODE (arg1) == RDIV_EXPR) 9884 return fold_build2 (MULT_EXPR, type, 9885 fold_build2 (RDIV_EXPR, type, arg0, 9886 TREE_OPERAND (arg1, 0)), 9887 TREE_OPERAND (arg1, 1)); 9888 9889 /* Convert C1/(X*C2) into (C1/C2)/X. */ 9890 if (flag_unsafe_math_optimizations 9891 && TREE_CODE (arg1) == MULT_EXPR 9892 && TREE_CODE (arg0) == REAL_CST 9893 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 9894 { 9895 tree tem = const_binop (RDIV_EXPR, arg0, 9896 TREE_OPERAND (arg1, 1), 0); 9897 if (tem) 9898 return fold_build2 (RDIV_EXPR, type, tem, 9899 TREE_OPERAND (arg1, 0)); 9900 } 9901 9902 if (flag_unsafe_math_optimizations) 9903 { 9904 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 9905 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 9906 9907 /* Optimize sin(x)/cos(x) as tan(x). 
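     sin(x)/cos(x) == tan(x) mathematically, but the two forms need not
     round the same way and behave differently near cos(x) == 0, hence the
     flag_unsafe_math_optimizations guard on this block.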
*/ 9908 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS) 9909 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF) 9910 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL)) 9911 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), 9912 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) 9913 { 9914 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 9915 9916 if (tanfn != NULL_TREE) 9917 return build_function_call_expr (tanfn, 9918 TREE_OPERAND (arg0, 1)); 9919 } 9920 9921 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */ 9922 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN) 9923 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF) 9924 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL)) 9925 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), 9926 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) 9927 { 9928 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 9929 9930 if (tanfn != NULL_TREE) 9931 { 9932 tree tmp = TREE_OPERAND (arg0, 1); 9933 tmp = build_function_call_expr (tanfn, tmp); 9934 return fold_build2 (RDIV_EXPR, type, 9935 build_real (type, dconst1), tmp); 9936 } 9937 } 9938 9939 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about 9940 NaNs or Infinities. */ 9941 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN) 9942 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF) 9943 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL))) 9944 { 9945 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9946 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 9947 9948 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 9949 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 9950 && operand_equal_p (arg00, arg01, 0)) 9951 { 9952 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 9953 9954 if (cosfn != NULL_TREE) 9955 return build_function_call_expr (cosfn, 9956 TREE_OPERAND (arg0, 1)); 9957 } 9958 } 9959 9960 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about 9961 NaNs or Infinities. */ 9962 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN) 9963 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF) 9964 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL))) 9965 { 9966 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9967 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 9968 9969 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 9970 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 9971 && operand_equal_p (arg00, arg01, 0)) 9972 { 9973 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 9974 9975 if (cosfn != NULL_TREE) 9976 { 9977 tree tmp = TREE_OPERAND (arg0, 1); 9978 tmp = build_function_call_expr (cosfn, tmp); 9979 return fold_build2 (RDIV_EXPR, type, 9980 build_real (type, dconst1), 9981 tmp); 9982 } 9983 } 9984 } 9985 9986 /* Optimize pow(x,c)/x as pow(x,c-1). */ 9987 if (fcode0 == BUILT_IN_POW 9988 || fcode0 == BUILT_IN_POWF 9989 || fcode0 == BUILT_IN_POWL) 9990 { 9991 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); 9992 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1))); 9993 if (TREE_CODE (arg01) == REAL_CST 9994 && ! 
TREE_CONSTANT_OVERFLOW (arg01) 9995 && operand_equal_p (arg1, arg00, 0)) 9996 { 9997 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 9998 REAL_VALUE_TYPE c; 9999 tree arg, arglist; 10000 10001 c = TREE_REAL_CST (arg01); 10002 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); 10003 arg = build_real (type, c); 10004 arglist = build_tree_list (NULL_TREE, arg); 10005 arglist = tree_cons (NULL_TREE, arg1, arglist); 10006 return build_function_call_expr (powfn, arglist); 10007 } 10008 } 10009 10010 /* Optimize x/expN(y) into x*expN(-y). */ 10011 if (BUILTIN_EXPONENT_P (fcode1)) 10012 { 10013 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); 10014 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1))); 10015 tree arglist = build_tree_list (NULL_TREE, 10016 fold_convert (type, arg)); 10017 arg1 = build_function_call_expr (expfn, arglist); 10018 return fold_build2 (MULT_EXPR, type, arg0, arg1); 10019 } 10020 10021 /* Optimize x/pow(y,z) into x*pow(y,-z). */ 10022 if (fcode1 == BUILT_IN_POW 10023 || fcode1 == BUILT_IN_POWF 10024 || fcode1 == BUILT_IN_POWL) 10025 { 10026 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); 10027 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); 10028 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1))); 10029 tree neg11 = fold_convert (type, negate_expr (arg11)); 10030 tree arglist = tree_cons(NULL_TREE, arg10, 10031 build_tree_list (NULL_TREE, neg11)); 10032 arg1 = build_function_call_expr (powfn, arglist); 10033 return fold_build2 (MULT_EXPR, type, arg0, arg1); 10034 } 10035 } 10036 return NULL_TREE; 10037 10038 case TRUNC_DIV_EXPR: 10039 case FLOOR_DIV_EXPR: 10040 /* Simplify A / (B << N) where A and B are positive and B is 10041 a power of 2, to A >> (N + log2(B)). */ 10042 strict_overflow_p = false; 10043 if (TREE_CODE (arg1) == LSHIFT_EXPR 10044 && (TYPE_UNSIGNED (type) 10045 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))) 10046 { 10047 tree sval = TREE_OPERAND (arg1, 0); 10048 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0) 10049 { 10050 tree sh_cnt = TREE_OPERAND (arg1, 1); 10051 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval)); 10052 10053 if (strict_overflow_p) 10054 fold_overflow_warning (("assuming signed overflow does not " 10055 "occur when simplifying A / (B << N)"), 10056 WARN_STRICT_OVERFLOW_MISC); 10057 10058 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt), 10059 sh_cnt, build_int_cst (NULL_TREE, pow2)); 10060 return fold_build2 (RSHIFT_EXPR, type, 10061 fold_convert (type, arg0), sh_cnt); 10062 } 10063 } 10064 /* Fall thru */ 10065 10066 case ROUND_DIV_EXPR: 10067 case CEIL_DIV_EXPR: 10068 case EXACT_DIV_EXPR: 10069 if (integer_onep (arg1)) 10070 return non_lvalue (fold_convert (type, arg0)); 10071 if (integer_zerop (arg1)) 10072 return NULL_TREE; 10073 /* X / -1 is -X. */ 10074 if (!TYPE_UNSIGNED (type) 10075 && TREE_CODE (arg1) == INTEGER_CST 10076 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 10077 && TREE_INT_CST_HIGH (arg1) == -1) 10078 return fold_convert (type, negate_expr (arg0)); 10079 10080 /* Convert -A / -B to A / B when the type is signed and overflow is 10081 undefined. 
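     E.g. (-x) / (-y) becomes x / y.  This assumes the negations cannot
     overflow (x == INT_MIN would be undefined anyway), which is what the
     fold_overflow_warning calls below record.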
*/ 10082 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 10083 && TREE_CODE (arg0) == NEGATE_EXPR 10084 && negate_expr_p (arg1)) 10085 { 10086 if (INTEGRAL_TYPE_P (type)) 10087 fold_overflow_warning (("assuming signed overflow does not occur " 10088 "when distributing negation across " 10089 "division"), 10090 WARN_STRICT_OVERFLOW_MISC); 10091 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10092 negate_expr (arg1)); 10093 } 10094 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 10095 && TREE_CODE (arg1) == NEGATE_EXPR 10096 && negate_expr_p (arg0)) 10097 { 10098 if (INTEGRAL_TYPE_P (type)) 10099 fold_overflow_warning (("assuming signed overflow does not occur " 10100 "when distributing negation across " 10101 "division"), 10102 WARN_STRICT_OVERFLOW_MISC); 10103 return fold_build2 (code, type, negate_expr (arg0), 10104 TREE_OPERAND (arg1, 0)); 10105 } 10106 10107 /* If arg0 is a multiple of arg1, then rewrite to the fastest div 10108 operation, EXACT_DIV_EXPR. 10109 10110 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now. 10111 At one time others generated faster code, it's not clear if they do 10112 after the last round to changes to the DIV code in expmed.c. */ 10113 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR) 10114 && multiple_of_p (type, arg0, arg1)) 10115 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1); 10116 10117 strict_overflow_p = false; 10118 if (TREE_CODE (arg1) == INTEGER_CST 10119 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10120 &strict_overflow_p))) 10121 { 10122 if (strict_overflow_p) 10123 fold_overflow_warning (("assuming signed overflow does not occur " 10124 "when simplifying division"), 10125 WARN_STRICT_OVERFLOW_MISC); 10126 return fold_convert (type, tem); 10127 } 10128 10129 return NULL_TREE; 10130 10131 case CEIL_MOD_EXPR: 10132 case FLOOR_MOD_EXPR: 10133 case ROUND_MOD_EXPR: 10134 case TRUNC_MOD_EXPR: 10135 /* X % 1 is always zero, but be sure to preserve any side 10136 effects in X. */ 10137 if (integer_onep (arg1)) 10138 return omit_one_operand (type, integer_zero_node, arg0); 10139 10140 /* X % 0, return X % 0 unchanged so that we can get the 10141 proper warnings and errors. */ 10142 if (integer_zerop (arg1)) 10143 return NULL_TREE; 10144 10145 /* 0 % X is always zero, but be sure to preserve any side 10146 effects in X. Place this after checking for X == 0. */ 10147 if (integer_zerop (arg0)) 10148 return omit_one_operand (type, integer_zero_node, arg1); 10149 10150 /* X % -1 is zero. */ 10151 if (!TYPE_UNSIGNED (type) 10152 && TREE_CODE (arg1) == INTEGER_CST 10153 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 10154 && TREE_INT_CST_HIGH (arg1) == -1) 10155 return omit_one_operand (type, integer_zero_node, arg0); 10156 10157 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, 10158 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */ 10159 strict_overflow_p = false; 10160 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) 10161 && (TYPE_UNSIGNED (type) 10162 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))) 10163 { 10164 tree c = arg1; 10165 /* Also optimize A % (C << N) where C is a power of 2, 10166 to A & ((C << N) - 1). 
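     For instance, for a non-negative or unsigned A, A % 8 becomes A & 7,
     and A % (4 << N) becomes A & ((4 << N) - 1).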
*/ 10167 if (TREE_CODE (arg1) == LSHIFT_EXPR) 10168 c = TREE_OPERAND (arg1, 0); 10169 10170 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) 10171 { 10172 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), 10173 arg1, integer_one_node); 10174 if (strict_overflow_p) 10175 fold_overflow_warning (("assuming signed overflow does not " 10176 "occur when simplifying " 10177 "X % (power of two)"), 10178 WARN_STRICT_OVERFLOW_MISC); 10179 return fold_build2 (BIT_AND_EXPR, type, 10180 fold_convert (type, arg0), 10181 fold_convert (type, mask)); 10182 } 10183 } 10184 10185 /* X % -C is the same as X % C. */ 10186 if (code == TRUNC_MOD_EXPR 10187 && !TYPE_UNSIGNED (type) 10188 && TREE_CODE (arg1) == INTEGER_CST 10189 && !TREE_CONSTANT_OVERFLOW (arg1) 10190 && TREE_INT_CST_HIGH (arg1) < 0 10191 && !TYPE_OVERFLOW_TRAPS (type) 10192 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */ 10193 && !sign_bit_p (arg1, arg1)) 10194 return fold_build2 (code, type, fold_convert (type, arg0), 10195 fold_convert (type, negate_expr (arg1))); 10196 10197 /* X % -Y is the same as X % Y. */ 10198 if (code == TRUNC_MOD_EXPR 10199 && !TYPE_UNSIGNED (type) 10200 && TREE_CODE (arg1) == NEGATE_EXPR 10201 && !TYPE_OVERFLOW_TRAPS (type)) 10202 return fold_build2 (code, type, fold_convert (type, arg0), 10203 fold_convert (type, TREE_OPERAND (arg1, 0))); 10204 10205 if (TREE_CODE (arg1) == INTEGER_CST 10206 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10207 &strict_overflow_p))) 10208 { 10209 if (strict_overflow_p) 10210 fold_overflow_warning (("assuming signed overflow does not occur " 10211 "when simplifying modulos"), 10212 WARN_STRICT_OVERFLOW_MISC); 10213 return fold_convert (type, tem); 10214 } 10215 10216 return NULL_TREE; 10217 10218 case LROTATE_EXPR: 10219 case RROTATE_EXPR: 10220 if (integer_all_onesp (arg0)) 10221 return omit_one_operand (type, arg0, arg1); 10222 goto shift; 10223 10224 case RSHIFT_EXPR: 10225 /* Optimize -1 >> x for arithmetic right shifts. */ 10226 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)) 10227 return omit_one_operand (type, arg0, arg1); 10228 /* ... fall through ... */ 10229 10230 case LSHIFT_EXPR: 10231 shift: 10232 if (integer_zerop (arg1)) 10233 return non_lvalue (fold_convert (type, arg0)); 10234 if (integer_zerop (arg0)) 10235 return omit_one_operand (type, arg0, arg1); 10236 10237 /* Since negative shift count is not well-defined, 10238 don't try to compute it in the compiler. */ 10239 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0) 10240 return NULL_TREE; 10241 10242 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */ 10243 if (TREE_CODE (op0) == code && host_integerp (arg1, false) 10244 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 10245 && host_integerp (TREE_OPERAND (arg0, 1), false) 10246 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 10247 { 10248 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) 10249 + TREE_INT_CST_LOW (arg1)); 10250 10251 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2 10252 being well defined. 
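     E.g. with 32-bit int, (x >> 16) >> 20 has a combined count of 36:
     rotates reduce the count modulo the precision, unsigned and left
     shifts fold to 0, and arithmetic right shifts clamp the count to
     precision - 1 (here 31).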
*/ 10253 if (low >= TYPE_PRECISION (type)) 10254 { 10255 if (code == LROTATE_EXPR || code == RROTATE_EXPR) 10256 low = low % TYPE_PRECISION (type); 10257 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) 10258 return build_int_cst (type, 0); 10259 else 10260 low = TYPE_PRECISION (type) - 1; 10261 } 10262 10263 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10264 build_int_cst (type, low)); 10265 } 10266 10267 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c 10268 into x & ((unsigned)-1 >> c) for unsigned types. */ 10269 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR) 10270 || (TYPE_UNSIGNED (type) 10271 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR)) 10272 && host_integerp (arg1, false) 10273 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 10274 && host_integerp (TREE_OPERAND (arg0, 1), false) 10275 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 10276 { 10277 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); 10278 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1); 10279 tree lshift; 10280 tree arg00; 10281 10282 if (low0 == low1) 10283 { 10284 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0)); 10285 10286 lshift = build_int_cst (type, -1); 10287 lshift = int_const_binop (code, lshift, arg1, 0); 10288 10289 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift); 10290 } 10291 } 10292 10293 /* Rewrite an LROTATE_EXPR by a constant into an 10294 RROTATE_EXPR by a new constant. */ 10295 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST) 10296 { 10297 tree tem = build_int_cst (NULL_TREE, 10298 GET_MODE_BITSIZE (TYPE_MODE (type))); 10299 tem = fold_convert (TREE_TYPE (arg1), tem); 10300 tem = const_binop (MINUS_EXPR, tem, arg1, 0); 10301 return fold_build2 (RROTATE_EXPR, type, arg0, tem); 10302 } 10303 10304 /* If we have a rotate of a bit operation with the rotate count and 10305 the second operand of the bit operation both constant, 10306 permute the two operations. */ 10307 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 10308 && (TREE_CODE (arg0) == BIT_AND_EXPR 10309 || TREE_CODE (arg0) == BIT_IOR_EXPR 10310 || TREE_CODE (arg0) == BIT_XOR_EXPR) 10311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 10312 return fold_build2 (TREE_CODE (arg0), type, 10313 fold_build2 (code, type, 10314 TREE_OPERAND (arg0, 0), arg1), 10315 fold_build2 (code, type, 10316 TREE_OPERAND (arg0, 1), arg1)); 10317 10318 /* Two consecutive rotates adding up to the width of the mode can 10319 be ignored. 
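     Rotating a 32-bit value right by 10 and then by 22 rotates it by a
     full 32 bits, which is the identity, so the innermost operand is
     returned unchanged.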
*/ 10320 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 10321 && TREE_CODE (arg0) == RROTATE_EXPR 10322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 10323 && TREE_INT_CST_HIGH (arg1) == 0 10324 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0 10325 && ((TREE_INT_CST_LOW (arg1) 10326 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))) 10327 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type)))) 10328 return TREE_OPERAND (arg0, 0); 10329 10330 return NULL_TREE; 10331 10332 case MIN_EXPR: 10333 if (operand_equal_p (arg0, arg1, 0)) 10334 return omit_one_operand (type, arg0, arg1); 10335 if (INTEGRAL_TYPE_P (type) 10336 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST)) 10337 return omit_one_operand (type, arg1, arg0); 10338 tem = fold_minmax (MIN_EXPR, type, arg0, arg1); 10339 if (tem) 10340 return tem; 10341 goto associate; 10342 10343 case MAX_EXPR: 10344 if (operand_equal_p (arg0, arg1, 0)) 10345 return omit_one_operand (type, arg0, arg1); 10346 if (INTEGRAL_TYPE_P (type) 10347 && TYPE_MAX_VALUE (type) 10348 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST)) 10349 return omit_one_operand (type, arg1, arg0); 10350 tem = fold_minmax (MAX_EXPR, type, arg0, arg1); 10351 if (tem) 10352 return tem; 10353 goto associate; 10354 10355 case TRUTH_ANDIF_EXPR: 10356 /* Note that the operands of this must be ints 10357 and their values must be 0 or 1. 10358 ("true" is a fixed value perhaps depending on the language.) */ 10359 /* If first arg is constant zero, return it. */ 10360 if (integer_zerop (arg0)) 10361 return fold_convert (type, arg0); 10362 case TRUTH_AND_EXPR: 10363 /* If either arg is constant true, drop it. */ 10364 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 10365 return non_lvalue (fold_convert (type, arg1)); 10366 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1) 10367 /* Preserve sequence points. */ 10368 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 10369 return non_lvalue (fold_convert (type, arg0)); 10370 /* If second arg is constant zero, result is zero, but first arg 10371 must be evaluated. */ 10372 if (integer_zerop (arg1)) 10373 return omit_one_operand (type, arg1, arg0); 10374 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR 10375 case will be handled here. */ 10376 if (integer_zerop (arg0)) 10377 return omit_one_operand (type, arg0, arg1); 10378 10379 /* !X && X is always false. */ 10380 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 10381 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10382 return omit_one_operand (type, integer_zero_node, arg1); 10383 /* X && !X is always false. */ 10384 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 10385 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10386 return omit_one_operand (type, integer_zero_node, arg0); 10387 10388 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y 10389 means A >= Y && A != MAX, but in this case we know that 10390 A < X <= MAX. */ 10391 10392 if (!TREE_SIDE_EFFECTS (arg0) 10393 && !TREE_SIDE_EFFECTS (arg1)) 10394 { 10395 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1); 10396 if (tem && !operand_equal_p (tem, arg0, 0)) 10397 return fold_build2 (code, type, tem, arg1); 10398 10399 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0); 10400 if (tem && !operand_equal_p (tem, arg1, 0)) 10401 return fold_build2 (code, type, arg0, tem); 10402 } 10403 10404 truth_andor: 10405 /* We only do these simplifications if we are optimizing. 
 */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.   Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && !
integer_zerop (arg1)) 10486 return omit_one_operand (type, arg1, arg0); 10487 /* Likewise for first arg, but note this only occurs here for 10488 TRUTH_OR_EXPR. */ 10489 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 10490 return omit_one_operand (type, arg0, arg1); 10491 10492 /* !X || X is always true. */ 10493 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 10494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10495 return omit_one_operand (type, integer_one_node, arg1); 10496 /* X || !X is always true. */ 10497 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 10498 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10499 return omit_one_operand (type, integer_one_node, arg0); 10500 10501 goto truth_andor; 10502 10503 case TRUTH_XOR_EXPR: 10504 /* If the second arg is constant zero, drop it. */ 10505 if (integer_zerop (arg1)) 10506 return non_lvalue (fold_convert (type, arg0)); 10507 /* If the second arg is constant true, this is a logical inversion. */ 10508 if (integer_onep (arg1)) 10509 { 10510 /* Only call invert_truthvalue if operand is a truth value. */ 10511 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 10512 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0); 10513 else 10514 tem = invert_truthvalue (arg0); 10515 return non_lvalue (fold_convert (type, tem)); 10516 } 10517 /* Identical arguments cancel to zero. */ 10518 if (operand_equal_p (arg0, arg1, 0)) 10519 return omit_one_operand (type, integer_zero_node, arg0); 10520 10521 /* !X ^ X is always true. */ 10522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 10523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10524 return omit_one_operand (type, integer_one_node, arg1); 10525 10526 /* X ^ !X is always true. */ 10527 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 10528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10529 return omit_one_operand (type, integer_one_node, arg0); 10530 10531 return NULL_TREE; 10532 10533 case EQ_EXPR: 10534 case NE_EXPR: 10535 tem = fold_comparison (code, type, op0, op1); 10536 if (tem != NULL_TREE) 10537 return tem; 10538 10539 /* bool_var != 0 becomes bool_var. */ 10540 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 10541 && code == NE_EXPR) 10542 return non_lvalue (fold_convert (type, arg0)); 10543 10544 /* bool_var == 1 becomes bool_var. */ 10545 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 10546 && code == EQ_EXPR) 10547 return non_lvalue (fold_convert (type, arg0)); 10548 10549 /* bool_var != 1 becomes !bool_var. */ 10550 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 10551 && code == NE_EXPR) 10552 return fold_build1 (TRUTH_NOT_EXPR, type, arg0); 10553 10554 /* bool_var == 0 becomes !bool_var. */ 10555 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 10556 && code == EQ_EXPR) 10557 return fold_build1 (TRUTH_NOT_EXPR, type, arg0); 10558 10559 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */ 10560 if (TREE_CODE (arg0) == BIT_NOT_EXPR 10561 && TREE_CODE (arg1) == INTEGER_CST) 10562 { 10563 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); 10564 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10565 fold_build1 (BIT_NOT_EXPR, cmp_type, 10566 fold_convert (cmp_type, arg1))); 10567 } 10568 10569 /* If this is an equality comparison of the address of a non-weak 10570 object against zero, then we know the result. */ 10571 if (TREE_CODE (arg0) == ADDR_EXPR 10572 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) 10573 && ! 
DECL_WEAK (TREE_OPERAND (arg0, 0)) 10574 && integer_zerop (arg1)) 10575 return constant_boolean_node (code != EQ_EXPR, type); 10576 10577 /* If this is an equality comparison of the address of two non-weak, 10578 unaliased symbols neither of which are extern (since we do not 10579 have access to attributes for externs), then we know the result. */ 10580 if (TREE_CODE (arg0) == ADDR_EXPR 10581 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) 10582 && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) 10583 && ! lookup_attribute ("alias", 10584 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0))) 10585 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0)) 10586 && TREE_CODE (arg1) == ADDR_EXPR 10587 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0)) 10588 && ! DECL_WEAK (TREE_OPERAND (arg1, 0)) 10589 && ! lookup_attribute ("alias", 10590 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0))) 10591 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0))) 10592 { 10593 /* We know that we're looking at the address of two 10594 non-weak, unaliased, static _DECL nodes. 10595 10596 It is both wasteful and incorrect to call operand_equal_p 10597 to compare the two ADDR_EXPR nodes. It is wasteful in that 10598 all we need to do is test pointer equality for the arguments 10599 to the two ADDR_EXPR nodes. It is incorrect to use 10600 operand_equal_p as that function is NOT equivalent to a 10601 C equality test. It can in fact return false for two 10602 objects which would test as equal using the C equality 10603 operator. */ 10604 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0); 10605 return constant_boolean_node (equal 10606 ? code == EQ_EXPR : code != EQ_EXPR, 10607 type); 10608 } 10609 10610 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or 10611 a MINUS_EXPR of a constant, we can convert it into a comparison with 10612 a revised constant as long as no overflow occurs. */ 10613 if (TREE_CODE (arg1) == INTEGER_CST 10614 && (TREE_CODE (arg0) == PLUS_EXPR 10615 || TREE_CODE (arg0) == MINUS_EXPR) 10616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 10617 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 10618 ? MINUS_EXPR : PLUS_EXPR, 10619 fold_convert (TREE_TYPE (arg0), arg1), 10620 TREE_OPERAND (arg0, 1), 0)) 10621 && ! TREE_CONSTANT_OVERFLOW (tem)) 10622 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); 10623 10624 /* Similarly for a NEGATE_EXPR. */ 10625 if (TREE_CODE (arg0) == NEGATE_EXPR 10626 && TREE_CODE (arg1) == INTEGER_CST 10627 && 0 != (tem = negate_expr (arg1)) 10628 && TREE_CODE (tem) == INTEGER_CST 10629 && ! TREE_CONSTANT_OVERFLOW (tem)) 10630 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); 10631 10632 /* If we have X - Y == 0, we can convert that to X == Y and similarly 10633 for !=. Don't do this for ordered comparisons due to overflow. */ 10634 if (TREE_CODE (arg0) == MINUS_EXPR 10635 && integer_zerop (arg1)) 10636 return fold_build2 (code, type, 10637 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 10638 10639 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */ 10640 if (TREE_CODE (arg0) == ABS_EXPR 10641 && (integer_zerop (arg1) || real_zerop (arg1))) 10642 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1); 10643 10644 /* If this is an EQ or NE comparison with zero and ARG0 is 10645 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require 10646 two operations, but the latter can be done in one less insn 10647 on machines that have only two-operand insns or on which a 10648 constant cannot be the first operand. 
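For illustration, with foo == 3 and bar == 0x28 the two forms evaluate to 8 and 1 respectively, and when bit 3 of bar is clear both evaluate to 0, so the comparison against zero gives the same answer either way.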
*/ 10649 if (TREE_CODE (arg0) == BIT_AND_EXPR 10650 && integer_zerop (arg1)) 10651 { 10652 tree arg00 = TREE_OPERAND (arg0, 0); 10653 tree arg01 = TREE_OPERAND (arg0, 1); 10654 if (TREE_CODE (arg00) == LSHIFT_EXPR 10655 && integer_onep (TREE_OPERAND (arg00, 0))) 10656 { 10657 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00), 10658 arg01, TREE_OPERAND (arg00, 1)); 10659 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem, 10660 build_int_cst (TREE_TYPE (arg0), 1)); 10661 return fold_build2 (code, type, 10662 fold_convert (TREE_TYPE (arg1), tem), arg1); 10663 } 10664 else if (TREE_CODE (arg01) == LSHIFT_EXPR 10665 && integer_onep (TREE_OPERAND (arg01, 0))) 10666 { 10667 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01), 10668 arg00, TREE_OPERAND (arg01, 1)); 10669 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem, 10670 build_int_cst (TREE_TYPE (arg0), 1)); 10671 return fold_build2 (code, type, 10672 fold_convert (TREE_TYPE (arg1), tem), arg1); 10673 } 10674 } 10675 10676 /* If this is an NE or EQ comparison of zero against the result of a 10677 signed MOD operation whose second operand is a power of 2, make 10678 the MOD operation unsigned since it is simpler and equivalent. */ 10679 if (integer_zerop (arg1) 10680 && !TYPE_UNSIGNED (TREE_TYPE (arg0)) 10681 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR 10682 || TREE_CODE (arg0) == CEIL_MOD_EXPR 10683 || TREE_CODE (arg0) == FLOOR_MOD_EXPR 10684 || TREE_CODE (arg0) == ROUND_MOD_EXPR) 10685 && integer_pow2p (TREE_OPERAND (arg0, 1))) 10686 { 10687 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0)); 10688 tree newmod = fold_build2 (TREE_CODE (arg0), newtype, 10689 fold_convert (newtype, 10690 TREE_OPERAND (arg0, 0)), 10691 fold_convert (newtype, 10692 TREE_OPERAND (arg0, 1))); 10693 10694 return fold_build2 (code, type, newmod, 10695 fold_convert (newtype, arg1)); 10696 } 10697 10698 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where 10699 C1 is a valid shift constant, and C2 is a power of two, i.e. 10700 a single bit. */ 10701 if (TREE_CODE (arg0) == BIT_AND_EXPR 10702 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR 10703 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)) 10704 == INTEGER_CST 10705 && integer_pow2p (TREE_OPERAND (arg0, 1)) 10706 && integer_zerop (arg1)) 10707 { 10708 tree itype = TREE_TYPE (arg0); 10709 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype); 10710 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1); 10711 10712 /* Check for a valid shift count. */ 10713 if (TREE_INT_CST_HIGH (arg001) == 0 10714 && TREE_INT_CST_LOW (arg001) < prec) 10715 { 10716 tree arg01 = TREE_OPERAND (arg0, 1); 10717 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 10718 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01); 10719 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0 10720 can be rewritten as (X & (C2 << C1)) != 0. */ 10721 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) 10722 { 10723 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001); 10724 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem); 10725 return fold_build2 (code, type, tem, arg1); 10726 } 10727 /* Otherwise, for signed (arithmetic) shifts, 10728 ((X >> C1) & C2) != 0 is rewritten as X < 0, and 10729 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */ 10730 else if (!TYPE_UNSIGNED (itype)) 10731 return fold_build2 (code == EQ_EXPR ? 
GE_EXPR : LT_EXPR, type, 10732 arg000, build_int_cst (itype, 0)); 10733 /* Otherwise, of unsigned (logical) shifts, 10734 ((X >> C1) & C2) != 0 is rewritten as (X,false), and 10735 ((X >> C1) & C2) == 0 is rewritten as (X,true). */ 10736 else 10737 return omit_one_operand (type, 10738 code == EQ_EXPR ? integer_one_node 10739 : integer_zero_node, 10740 arg000); 10741 } 10742 } 10743 10744 /* If this is an NE comparison of zero with an AND of one, remove the 10745 comparison since the AND will give the correct value. */ 10746 if (code == NE_EXPR 10747 && integer_zerop (arg1) 10748 && TREE_CODE (arg0) == BIT_AND_EXPR 10749 && integer_onep (TREE_OPERAND (arg0, 1))) 10750 return fold_convert (type, arg0); 10751 10752 /* If we have (A & C) == C where C is a power of 2, convert this into 10753 (A & C) != 0. Similarly for NE_EXPR. */ 10754 if (TREE_CODE (arg0) == BIT_AND_EXPR 10755 && integer_pow2p (TREE_OPERAND (arg0, 1)) 10756 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 10757 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 10758 arg0, fold_convert (TREE_TYPE (arg0), 10759 integer_zero_node)); 10760 10761 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign 10762 bit, then fold the expression into A < 0 or A >= 0. */ 10763 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type); 10764 if (tem) 10765 return tem; 10766 10767 /* If we have (A & C) == D where D & ~C != 0, convert this into 0. 10768 Similarly for NE_EXPR. */ 10769 if (TREE_CODE (arg0) == BIT_AND_EXPR 10770 && TREE_CODE (arg1) == INTEGER_CST 10771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 10772 { 10773 tree notc = fold_build1 (BIT_NOT_EXPR, 10774 TREE_TYPE (TREE_OPERAND (arg0, 1)), 10775 TREE_OPERAND (arg0, 1)); 10776 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), 10777 arg1, notc); 10778 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 10779 if (integer_nonzerop (dandnotc)) 10780 return omit_one_operand (type, rslt, arg0); 10781 } 10782 10783 /* If we have (A | C) == D where C & ~D != 0, convert this into 0. 10784 Similarly for NE_EXPR. */ 10785 if (TREE_CODE (arg0) == BIT_IOR_EXPR 10786 && TREE_CODE (arg1) == INTEGER_CST 10787 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 10788 { 10789 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); 10790 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), 10791 TREE_OPERAND (arg0, 1), notd); 10792 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 10793 if (integer_nonzerop (candnotd)) 10794 return omit_one_operand (type, rslt, arg0); 10795 } 10796 10797 /* If this is a comparison of a field, we may be able to simplify it. */ 10798 if (((TREE_CODE (arg0) == COMPONENT_REF 10799 && lang_hooks.can_use_bit_fields_p ()) 10800 || TREE_CODE (arg0) == BIT_FIELD_REF) 10801 /* Handle the constant case even without -O 10802 to make sure the warnings are given. */ 10803 && (optimize || TREE_CODE (arg1) == INTEGER_CST)) 10804 { 10805 t1 = optimize_bit_field_compare (code, type, arg0, arg1); 10806 if (t1) 10807 return t1; 10808 } 10809 10810 /* Optimize comparisons of strlen vs zero to a compare of the 10811 first character of the string vs zero. To wit, 10812 strlen(ptr) == 0 => *ptr == 0 10813 strlen(ptr) != 0 => *ptr != 0 10814 Other cases should reduce to one of these two (or a constant) 10815 due to the return value of strlen being unsigned. 
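For instance, strlen (ptr) > 0 is the same test as strlen (ptr) != 0 because the unsigned result is greater than zero exactly when it is nonzero, so earlier canonicalizations funnel such comparisons into the two forms handled here.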
*/ 10816 if (TREE_CODE (arg0) == CALL_EXPR 10817 && integer_zerop (arg1)) 10818 { 10819 tree fndecl = get_callee_fndecl (arg0); 10820 tree arglist; 10821 10822 if (fndecl 10823 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 10824 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN 10825 && (arglist = TREE_OPERAND (arg0, 1)) 10826 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE 10827 && ! TREE_CHAIN (arglist)) 10828 { 10829 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist)); 10830 return fold_build2 (code, type, iref, 10831 build_int_cst (TREE_TYPE (iref), 0)); 10832 } 10833 } 10834 10835 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width 10836 of X. Similarly fold (X >> C) == 0 into X >= 0. */ 10837 if (TREE_CODE (arg0) == RSHIFT_EXPR 10838 && integer_zerop (arg1) 10839 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 10840 { 10841 tree arg00 = TREE_OPERAND (arg0, 0); 10842 tree arg01 = TREE_OPERAND (arg0, 1); 10843 tree itype = TREE_TYPE (arg00); 10844 if (TREE_INT_CST_HIGH (arg01) == 0 10845 && TREE_INT_CST_LOW (arg01) 10846 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1)) 10847 { 10848 if (TYPE_UNSIGNED (itype)) 10849 { 10850 itype = lang_hooks.types.signed_type (itype); 10851 arg00 = fold_convert (itype, arg00); 10852 } 10853 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, 10854 type, arg00, build_int_cst (itype, 0)); 10855 } 10856 } 10857 10858 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */ 10859 if (integer_zerop (arg1) 10860 && TREE_CODE (arg0) == BIT_XOR_EXPR) 10861 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10862 TREE_OPERAND (arg0, 1)); 10863 10864 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */ 10865 if (TREE_CODE (arg0) == BIT_XOR_EXPR 10866 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 10867 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10868 build_int_cst (TREE_TYPE (arg1), 0)); 10869 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */ 10870 if (TREE_CODE (arg0) == BIT_XOR_EXPR 10871 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 10872 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 10873 return fold_build2 (code, type, TREE_OPERAND (arg0, 1), 10874 build_int_cst (TREE_TYPE (arg1), 0)); 10875 10876 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */ 10877 if (TREE_CODE (arg0) == BIT_XOR_EXPR 10878 && TREE_CODE (arg1) == INTEGER_CST 10879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 10880 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), 10881 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1), 10882 TREE_OPERAND (arg0, 1), arg1)); 10883 10884 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into 10885 (X & C) == 0 when C is a single bit. */ 10886 if (TREE_CODE (arg0) == BIT_AND_EXPR 10887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR 10888 && integer_zerop (arg1) 10889 && integer_pow2p (TREE_OPERAND (arg0, 1))) 10890 { 10891 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), 10892 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0), 10893 TREE_OPERAND (arg0, 1)); 10894 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, 10895 type, tem, arg1); 10896 } 10897 10898 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the 10899 constant C is a power of two, i.e. a single bit. 
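For example, with C == 4: when bit 2 of X is set, (X & 4) ^ 4 is 0 while X & 4 is nonzero; when the bit is clear, (X & 4) ^ 4 is 4 while X & 4 is 0, so the fold simply flips the sense of the comparison.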
*/ 10900 if (TREE_CODE (arg0) == BIT_XOR_EXPR 10901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 10902 && integer_zerop (arg1) 10903 && integer_pow2p (TREE_OPERAND (arg0, 1)) 10904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 10905 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 10906 { 10907 tree arg00 = TREE_OPERAND (arg0, 0); 10908 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 10909 arg00, build_int_cst (TREE_TYPE (arg00), 0)); 10910 } 10911 10912 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0, 10913 when is C is a power of two, i.e. a single bit. */ 10914 if (TREE_CODE (arg0) == BIT_AND_EXPR 10915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR 10916 && integer_zerop (arg1) 10917 && integer_pow2p (TREE_OPERAND (arg0, 1)) 10918 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 10919 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 10920 { 10921 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 10922 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000), 10923 arg000, TREE_OPERAND (arg0, 1)); 10924 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 10925 tem, build_int_cst (TREE_TYPE (tem), 0)); 10926 } 10927 10928 if (integer_zerop (arg1) 10929 && tree_expr_nonzero_p (arg0)) 10930 { 10931 tree res = constant_boolean_node (code==NE_EXPR, type); 10932 return omit_one_operand (type, res, arg0); 10933 } 10934 return NULL_TREE; 10935 10936 case LT_EXPR: 10937 case GT_EXPR: 10938 case LE_EXPR: 10939 case GE_EXPR: 10940 tem = fold_comparison (code, type, op0, op1); 10941 if (tem != NULL_TREE) 10942 return tem; 10943 10944 /* Transform comparisons of the form X +- C CMP X. */ 10945 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 10946 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 10947 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 10948 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))) 10949 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 10950 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))))) 10951 { 10952 tree arg01 = TREE_OPERAND (arg0, 1); 10953 enum tree_code code0 = TREE_CODE (arg0); 10954 int is_positive; 10955 10956 if (TREE_CODE (arg01) == REAL_CST) 10957 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1; 10958 else 10959 is_positive = tree_int_cst_sgn (arg01); 10960 10961 /* (X - c) > X becomes false. */ 10962 if (code == GT_EXPR 10963 && ((code0 == MINUS_EXPR && is_positive >= 0) 10964 || (code0 == PLUS_EXPR && is_positive <= 0))) 10965 { 10966 if (TREE_CODE (arg01) == INTEGER_CST 10967 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 10968 fold_overflow_warning (("assuming signed overflow does not " 10969 "occur when assuming that (X - c) > X " 10970 "is always false"), 10971 WARN_STRICT_OVERFLOW_ALL); 10972 return constant_boolean_node (0, type); 10973 } 10974 10975 /* Likewise (X + c) < X becomes false. */ 10976 if (code == LT_EXPR 10977 && ((code0 == PLUS_EXPR && is_positive >= 0) 10978 || (code0 == MINUS_EXPR && is_positive <= 0))) 10979 { 10980 if (TREE_CODE (arg01) == INTEGER_CST 10981 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 10982 fold_overflow_warning (("assuming signed overflow does not " 10983 "occur when assuming that " 10984 "(X + c) < X is always false"), 10985 WARN_STRICT_OVERFLOW_ALL); 10986 return constant_boolean_node (0, type); 10987 } 10988 10989 /* Convert (X - c) <= X to true. 
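This holds for non-negative c when signed overflow is treated as undefined; the !HONOR_NANS guard below is needed because the comparison would be false if X were a NaN.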
*/ 10990 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 10991 && code == LE_EXPR 10992 && ((code0 == MINUS_EXPR && is_positive >= 0) 10993 || (code0 == PLUS_EXPR && is_positive <= 0))) 10994 { 10995 if (TREE_CODE (arg01) == INTEGER_CST 10996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 10997 fold_overflow_warning (("assuming signed overflow does not " 10998 "occur when assuming that " 10999 "(X - c) <= X is always true"), 11000 WARN_STRICT_OVERFLOW_ALL); 11001 return constant_boolean_node (1, type); 11002 } 11003 11004 /* Convert (X + c) >= X to true. */ 11005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 11006 && code == GE_EXPR 11007 && ((code0 == PLUS_EXPR && is_positive >= 0) 11008 || (code0 == MINUS_EXPR && is_positive <= 0))) 11009 { 11010 if (TREE_CODE (arg01) == INTEGER_CST 11011 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 11012 fold_overflow_warning (("assuming signed overflow does not " 11013 "occur when assuming that " 11014 "(X + c) >= X is always true"), 11015 WARN_STRICT_OVERFLOW_ALL); 11016 return constant_boolean_node (1, type); 11017 } 11018 11019 if (TREE_CODE (arg01) == INTEGER_CST) 11020 { 11021 /* Convert X + c > X and X - c < X to true for integers. */ 11022 if (code == GT_EXPR 11023 && ((code0 == PLUS_EXPR && is_positive > 0) 11024 || (code0 == MINUS_EXPR && is_positive < 0))) 11025 { 11026 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 11027 fold_overflow_warning (("assuming signed overflow does " 11028 "not occur when assuming that " 11029 "(X + c) > X is always true"), 11030 WARN_STRICT_OVERFLOW_ALL); 11031 return constant_boolean_node (1, type); 11032 } 11033 11034 if (code == LT_EXPR 11035 && ((code0 == MINUS_EXPR && is_positive > 0) 11036 || (code0 == PLUS_EXPR && is_positive < 0))) 11037 { 11038 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 11039 fold_overflow_warning (("assuming signed overflow does " 11040 "not occur when assuming that " 11041 "(X - c) < X is always true"), 11042 WARN_STRICT_OVERFLOW_ALL); 11043 return constant_boolean_node (1, type); 11044 } 11045 11046 /* Convert X + c <= X and X - c >= X to false for integers. */ 11047 if (code == LE_EXPR 11048 && ((code0 == PLUS_EXPR && is_positive > 0) 11049 || (code0 == MINUS_EXPR && is_positive < 0))) 11050 { 11051 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 11052 fold_overflow_warning (("assuming signed overflow does " 11053 "not occur when assuming that " 11054 "(X + c) <= X is always false"), 11055 WARN_STRICT_OVERFLOW_ALL); 11056 return constant_boolean_node (0, type); 11057 } 11058 11059 if (code == GE_EXPR 11060 && ((code0 == MINUS_EXPR && is_positive > 0) 11061 || (code0 == PLUS_EXPR && is_positive < 0))) 11062 { 11063 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 11064 fold_overflow_warning (("assuming signed overflow does " 11065 "not occur when assuming that " 11066 "(X - c) >= X is always false"), 11067 WARN_STRICT_OVERFLOW_ALL); 11068 return constant_boolean_node (0, type); 11069 } 11070 } 11071 } 11072 11073 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0. 11074 This transformation affects the cases which are handled in later 11075 optimizations involving comparisons with non-negative constants.
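For example, X >= 4 becomes X > 3 and X < 4 becomes X <= 3, so the later code only needs to recognize the strict-greater and less-or-equal forms.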
*/ 11076 if (TREE_CODE (arg1) == INTEGER_CST 11077 && TREE_CODE (arg0) != INTEGER_CST 11078 && tree_int_cst_sgn (arg1) > 0) 11079 { 11080 if (code == GE_EXPR) 11081 { 11082 arg1 = const_binop (MINUS_EXPR, arg1, 11083 build_int_cst (TREE_TYPE (arg1), 1), 0); 11084 return fold_build2 (GT_EXPR, type, arg0, 11085 fold_convert (TREE_TYPE (arg0), arg1)); 11086 } 11087 if (code == LT_EXPR) 11088 { 11089 arg1 = const_binop (MINUS_EXPR, arg1, 11090 build_int_cst (TREE_TYPE (arg1), 1), 0); 11091 return fold_build2 (LE_EXPR, type, arg0, 11092 fold_convert (TREE_TYPE (arg0), arg1)); 11093 } 11094 } 11095 11096 /* Comparisons with the highest or lowest possible integer of 11097 the specified size will have known values. */ 11098 { 11099 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1))); 11100 11101 if (TREE_CODE (arg1) == INTEGER_CST 11102 && ! TREE_CONSTANT_OVERFLOW (arg1) 11103 && width <= 2 * HOST_BITS_PER_WIDE_INT 11104 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 11105 || POINTER_TYPE_P (TREE_TYPE (arg1)))) 11106 { 11107 HOST_WIDE_INT signed_max_hi; 11108 unsigned HOST_WIDE_INT signed_max_lo; 11109 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo; 11110 11111 if (width <= HOST_BITS_PER_WIDE_INT) 11112 { 11113 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 11114 - 1; 11115 signed_max_hi = 0; 11116 max_hi = 0; 11117 11118 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 11119 { 11120 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 11121 min_lo = 0; 11122 min_hi = 0; 11123 } 11124 else 11125 { 11126 max_lo = signed_max_lo; 11127 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 11128 min_hi = -1; 11129 } 11130 } 11131 else 11132 { 11133 width -= HOST_BITS_PER_WIDE_INT; 11134 signed_max_lo = -1; 11135 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 11136 - 1; 11137 max_lo = -1; 11138 min_lo = 0; 11139 11140 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 11141 { 11142 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 11143 min_hi = 0; 11144 } 11145 else 11146 { 11147 max_hi = signed_max_hi; 11148 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 11149 } 11150 } 11151 11152 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi 11153 && TREE_INT_CST_LOW (arg1) == max_lo) 11154 switch (code) 11155 { 11156 case GT_EXPR: 11157 return omit_one_operand (type, integer_zero_node, arg0); 11158 11159 case GE_EXPR: 11160 return fold_build2 (EQ_EXPR, type, op0, op1); 11161 11162 case LE_EXPR: 11163 return omit_one_operand (type, integer_one_node, arg0); 11164 11165 case LT_EXPR: 11166 return fold_build2 (NE_EXPR, type, op0, op1); 11167 11168 /* The GE_EXPR and LT_EXPR cases above are not normally 11169 reached because of previous transformations. 
*/ 11170 11171 default: 11172 break; 11173 } 11174 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 11175 == max_hi 11176 && TREE_INT_CST_LOW (arg1) == max_lo - 1) 11177 switch (code) 11178 { 11179 case GT_EXPR: 11180 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0); 11181 return fold_build2 (EQ_EXPR, type, 11182 fold_convert (TREE_TYPE (arg1), arg0), 11183 arg1); 11184 case LE_EXPR: 11185 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0); 11186 return fold_build2 (NE_EXPR, type, 11187 fold_convert (TREE_TYPE (arg1), arg0), 11188 arg1); 11189 default: 11190 break; 11191 } 11192 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 11193 == min_hi 11194 && TREE_INT_CST_LOW (arg1) == min_lo) 11195 switch (code) 11196 { 11197 case LT_EXPR: 11198 return omit_one_operand (type, integer_zero_node, arg0); 11199 11200 case LE_EXPR: 11201 return fold_build2 (EQ_EXPR, type, op0, op1); 11202 11203 case GE_EXPR: 11204 return omit_one_operand (type, integer_one_node, arg0); 11205 11206 case GT_EXPR: 11207 return fold_build2 (NE_EXPR, type, op0, op1); 11208 11209 default: 11210 break; 11211 } 11212 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 11213 == min_hi 11214 && TREE_INT_CST_LOW (arg1) == min_lo + 1) 11215 switch (code) 11216 { 11217 case GE_EXPR: 11218 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); 11219 return fold_build2 (NE_EXPR, type, 11220 fold_convert (TREE_TYPE (arg1), arg0), 11221 arg1); 11222 case LT_EXPR: 11223 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); 11224 return fold_build2 (EQ_EXPR, type, 11225 fold_convert (TREE_TYPE (arg1), arg0), 11226 arg1); 11227 default: 11228 break; 11229 } 11230 11231 else if (!in_gimple_form 11232 && TREE_INT_CST_HIGH (arg1) == signed_max_hi 11233 && TREE_INT_CST_LOW (arg1) == signed_max_lo 11234 && TYPE_UNSIGNED (TREE_TYPE (arg1)) 11235 /* signed_type does not work on pointer types. */ 11236 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))) 11237 { 11238 /* The following case also applies to X < signed_max+1 11239 and X >= signed_max+1 because previous transformations. */ 11240 if (code == LE_EXPR || code == GT_EXPR) 11241 { 11242 tree st; 11243 st = lang_hooks.types.signed_type (TREE_TYPE (arg1)); 11244 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR, 11245 type, fold_convert (st, arg0), 11246 build_int_cst (st, 0)); 11247 } 11248 } 11249 } 11250 } 11251 11252 /* If we are comparing an ABS_EXPR with a constant, we can 11253 convert all the cases into explicit comparisons, but they may 11254 well not be faster than doing the ABS and one comparison. 11255 But ABS (X) <= C is a range comparison, which becomes a subtraction 11256 and a comparison, and is probably faster. */ 11257 if (code == LE_EXPR 11258 && TREE_CODE (arg1) == INTEGER_CST 11259 && TREE_CODE (arg0) == ABS_EXPR 11260 && ! TREE_SIDE_EFFECTS (arg0) 11261 && (0 != (tem = negate_expr (arg1))) 11262 && TREE_CODE (tem) == INTEGER_CST 11263 && ! TREE_CONSTANT_OVERFLOW (tem)) 11264 return fold_build2 (TRUTH_ANDIF_EXPR, type, 11265 build2 (GE_EXPR, type, 11266 TREE_OPERAND (arg0, 0), tem), 11267 build2 (LE_EXPR, type, 11268 TREE_OPERAND (arg0, 0), arg1)); 11269 11270 /* Convert ABS_EXPR<x> >= 0 to true. */ 11271 strict_overflow_p = false; 11272 if (code == GE_EXPR 11273 && (integer_zerop (arg1) 11274 || (! 
HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 11275 && real_zerop (arg1))) 11276 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 11277 { 11278 if (strict_overflow_p) 11279 fold_overflow_warning (("assuming signed overflow does not occur " 11280 "when simplifying comparison of " 11281 "absolute value and zero"), 11282 WARN_STRICT_OVERFLOW_CONDITIONAL); 11283 return omit_one_operand (type, integer_one_node, arg0); 11284 } 11285 11286 /* Convert ABS_EXPR<x> < 0 to false. */ 11287 strict_overflow_p = false; 11288 if (code == LT_EXPR 11289 && (integer_zerop (arg1) || real_zerop (arg1)) 11290 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 11291 { 11292 if (strict_overflow_p) 11293 fold_overflow_warning (("assuming signed overflow does not occur " 11294 "when simplifying comparison of " 11295 "absolute value and zero"), 11296 WARN_STRICT_OVERFLOW_CONDITIONAL); 11297 return omit_one_operand (type, integer_zero_node, arg0); 11298 } 11299 11300 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 11301 and similarly for >= into !=. */ 11302 if ((code == LT_EXPR || code == GE_EXPR) 11303 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 11304 && TREE_CODE (arg1) == LSHIFT_EXPR 11305 && integer_onep (TREE_OPERAND (arg1, 0))) 11306 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 11307 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 11308 TREE_OPERAND (arg1, 1)), 11309 build_int_cst (TREE_TYPE (arg0), 0)); 11310 11311 if ((code == LT_EXPR || code == GE_EXPR) 11312 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 11313 && (TREE_CODE (arg1) == NOP_EXPR 11314 || TREE_CODE (arg1) == CONVERT_EXPR) 11315 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR 11316 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0))) 11317 return 11318 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 11319 fold_convert (TREE_TYPE (arg0), 11320 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 11321 TREE_OPERAND (TREE_OPERAND (arg1, 0), 11322 1))), 11323 build_int_cst (TREE_TYPE (arg0), 0)); 11324 11325 return NULL_TREE; 11326 11327 case UNORDERED_EXPR: 11328 case ORDERED_EXPR: 11329 case UNLT_EXPR: 11330 case UNLE_EXPR: 11331 case UNGT_EXPR: 11332 case UNGE_EXPR: 11333 case UNEQ_EXPR: 11334 case LTGT_EXPR: 11335 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 11336 { 11337 t1 = fold_relational_const (code, type, arg0, arg1); 11338 if (t1 != NULL_TREE) 11339 return t1; 11340 } 11341 11342 /* If the first operand is NaN, the result is constant. */ 11343 if (TREE_CODE (arg0) == REAL_CST 11344 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)) 11345 && (code != LTGT_EXPR || ! flag_trapping_math)) 11346 { 11347 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 11348 ? integer_zero_node 11349 : integer_one_node; 11350 return omit_one_operand (type, t1, arg1); 11351 } 11352 11353 /* If the second operand is NaN, the result is constant. */ 11354 if (TREE_CODE (arg1) == REAL_CST 11355 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 11356 && (code != LTGT_EXPR || ! flag_trapping_math)) 11357 { 11358 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 11359 ? integer_zero_node 11360 : integer_one_node; 11361 return omit_one_operand (type, t1, arg0); 11362 } 11363 11364 /* Simplify unordered comparison of something with itself. 
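For example, UNEQ (x, x) is always true: a non-NaN x is equal to itself, and a NaN makes the operands unordered, which also satisfies the UN* predicates.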
*/ 11365 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR) 11366 && operand_equal_p (arg0, arg1, 0)) 11367 return constant_boolean_node (1, type); 11368 11369 if (code == LTGT_EXPR 11370 && !flag_trapping_math 11371 && operand_equal_p (arg0, arg1, 0)) 11372 return constant_boolean_node (0, type); 11373 11374 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 11375 { 11376 tree targ0 = strip_float_extensions (arg0); 11377 tree targ1 = strip_float_extensions (arg1); 11378 tree newtype = TREE_TYPE (targ0); 11379 11380 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 11381 newtype = TREE_TYPE (targ1); 11382 11383 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 11384 return fold_build2 (code, type, fold_convert (newtype, targ0), 11385 fold_convert (newtype, targ1)); 11386 } 11387 11388 return NULL_TREE; 11389 11390 case COMPOUND_EXPR: 11391 /* When pedantic, a compound expression can be neither an lvalue 11392 nor an integer constant expression. */ 11393 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1)) 11394 return NULL_TREE; 11395 /* Don't let (0, 0) be null pointer constant. */ 11396 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1) 11397 : fold_convert (type, arg1); 11398 return pedantic_non_lvalue (tem); 11399 11400 case COMPLEX_EXPR: 11401 if ((TREE_CODE (arg0) == REAL_CST 11402 && TREE_CODE (arg1) == REAL_CST) 11403 || (TREE_CODE (arg0) == INTEGER_CST 11404 && TREE_CODE (arg1) == INTEGER_CST)) 11405 return build_complex (type, arg0, arg1); 11406 return NULL_TREE; 11407 11408 case ASSERT_EXPR: 11409 /* An ASSERT_EXPR should never be passed to fold_binary. */ 11410 gcc_unreachable (); 11411 11412 default: 11413 return NULL_TREE; 11414 } /* switch (code) */ 11415} 11416 11417/* Callback for walk_tree, looking for LABEL_EXPR. 11418 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE. 11419 Do not check the sub-tree of GOTO_EXPR. */ 11420 11421static tree 11422contains_label_1 (tree *tp, 11423 int *walk_subtrees, 11424 void *data ATTRIBUTE_UNUSED) 11425{ 11426 switch (TREE_CODE (*tp)) 11427 { 11428 case LABEL_EXPR: 11429 return *tp; 11430 case GOTO_EXPR: 11431 *walk_subtrees = 0; 11432 /* no break */ 11433 default: 11434 return NULL_TREE; 11435 } 11436} 11437 11438/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is 11439 accessible from outside the sub-tree. Returns true if such a 11440 label is found, false otherwise. */ 11441 11442static bool 11443contains_label_p (tree st) 11444{ 11445 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE); 11446} 11447 11448/* Fold a ternary expression of code CODE and type TYPE with operands 11449 OP0, OP1, and OP2. Return the folded expression if folding is 11450 successful. Otherwise, return NULL_TREE. */ 11451 11452tree 11453fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) 11454{ 11455 tree tem; 11456 tree arg0 = NULL_TREE, arg1 = NULL_TREE; 11457 enum tree_code_class kind = TREE_CODE_CLASS (code); 11458 11459 gcc_assert (IS_EXPR_CODE_CLASS (kind) 11460 && TREE_CODE_LENGTH (code) == 3); 11461 11462 /* Strip any conversions that don't change the mode. This is safe 11463 for every expression, except for a comparison expression because 11464 its signedness is derived from its operands. So, in the latter 11465 case, only strip conversions that don't change the signedness.
11466 11467 Note that this is done as an internal manipulation within the 11468 constant folder, in order to find the simplest representation of 11469 the arguments so that their form can be studied. In any cases, 11470 the appropriate type conversions should be put back in the tree 11471 that will get out of the constant folder. */ 11472 if (op0) 11473 { 11474 arg0 = op0; 11475 STRIP_NOPS (arg0); 11476 } 11477 11478 if (op1) 11479 { 11480 arg1 = op1; 11481 STRIP_NOPS (arg1); 11482 } 11483 11484 switch (code) 11485 { 11486 case COMPONENT_REF: 11487 if (TREE_CODE (arg0) == CONSTRUCTOR 11488 && ! type_contains_placeholder_p (TREE_TYPE (arg0))) 11489 { 11490 unsigned HOST_WIDE_INT idx; 11491 tree field, value; 11492 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value) 11493 if (field == arg1) 11494 return value; 11495 } 11496 return NULL_TREE; 11497 11498 case COND_EXPR: 11499 /* Pedantic ANSI C says that a conditional expression is never an lvalue, 11500 so all simple results must be passed through pedantic_non_lvalue. */ 11501 if (TREE_CODE (arg0) == INTEGER_CST) 11502 { 11503 tree unused_op = integer_zerop (arg0) ? op1 : op2; 11504 tem = integer_zerop (arg0) ? op2 : op1; 11505 /* Only optimize constant conditions when the selected branch 11506 has the same type as the COND_EXPR. This avoids optimizing 11507 away "c ? x : throw", where the throw has a void type. 11508 Avoid throwing away that operand which contains label. */ 11509 if ((!TREE_SIDE_EFFECTS (unused_op) 11510 || !contains_label_p (unused_op)) 11511 && (! VOID_TYPE_P (TREE_TYPE (tem)) 11512 || VOID_TYPE_P (type))) 11513 return pedantic_non_lvalue (tem); 11514 return NULL_TREE; 11515 } 11516 if (operand_equal_p (arg1, op2, 0)) 11517 return pedantic_omit_one_operand (type, arg1, arg0); 11518 11519 /* If we have A op B ? A : C, we may be able to convert this to a 11520 simpler expression, depending on the operation and the values 11521 of B and C. Signed zeros prevent all of these transformations, 11522 for reasons given above each one. 11523 11524 Also try swapping the arguments and inverting the conditional. */ 11525 if (COMPARISON_CLASS_P (arg0) 11526 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 11527 arg1, TREE_OPERAND (arg0, 1)) 11528 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1)))) 11529 { 11530 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2); 11531 if (tem) 11532 return tem; 11533 } 11534 11535 if (COMPARISON_CLASS_P (arg0) 11536 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 11537 op2, 11538 TREE_OPERAND (arg0, 1)) 11539 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2)))) 11540 { 11541 tem = fold_truth_not_expr (arg0); 11542 if (tem && COMPARISON_CLASS_P (tem)) 11543 { 11544 tem = fold_cond_expr_with_comparison (type, tem, op2, op1); 11545 if (tem) 11546 return tem; 11547 } 11548 } 11549 11550 /* If the second operand is simpler than the third, swap them 11551 since that produces better jump optimization results. */ 11552 if (truth_value_p (TREE_CODE (arg0)) 11553 && tree_swap_operands_p (op1, op2, false)) 11554 { 11555 /* See if this can be inverted. If it can't, possibly because 11556 it was a floating-point inequality comparison, don't do 11557 anything. */ 11558 tem = fold_truth_not_expr (arg0); 11559 if (tem) 11560 return fold_build3 (code, type, tem, op2, op1); 11561 } 11562 11563 /* Convert A ? 1 : 0 to simply A. 
*/ 11564 if (integer_onep (op1) 11565 && integer_zerop (op2) 11566 /* If we try to convert OP0 to our type, the 11567 call to fold will try to move the conversion inside 11568 a COND, which will recurse. In that case, the COND_EXPR 11569 is probably the best choice, so leave it alone. */ 11570 && type == TREE_TYPE (arg0)) 11571 return pedantic_non_lvalue (arg0); 11572 11573 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR 11574 over COND_EXPR in cases such as floating point comparisons. */ 11575 if (integer_zerop (op1) 11576 && integer_onep (op2) 11577 && truth_value_p (TREE_CODE (arg0))) 11578 return pedantic_non_lvalue (fold_convert (type, 11579 invert_truthvalue (arg0))); 11580 11581 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */ 11582 if (TREE_CODE (arg0) == LT_EXPR 11583 && integer_zerop (TREE_OPERAND (arg0, 1)) 11584 && integer_zerop (op2) 11585 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1))) 11586 { 11587 /* sign_bit_p only checks ARG1 bits within A's precision. 11588 If <sign bit of A> has wider type than A, bits outside 11589 of A's precision in <sign bit of A> need to be checked. 11590 If they are all 0, this optimization needs to be done 11591 in unsigned A's type, if they are all 1 in signed A's type, 11592 otherwise this can't be done. */ 11593 if (TYPE_PRECISION (TREE_TYPE (tem)) 11594 < TYPE_PRECISION (TREE_TYPE (arg1)) 11595 && TYPE_PRECISION (TREE_TYPE (tem)) 11596 < TYPE_PRECISION (type)) 11597 { 11598 unsigned HOST_WIDE_INT mask_lo; 11599 HOST_WIDE_INT mask_hi; 11600 int inner_width, outer_width; 11601 tree tem_type; 11602 11603 inner_width = TYPE_PRECISION (TREE_TYPE (tem)); 11604 outer_width = TYPE_PRECISION (TREE_TYPE (arg1)); 11605 if (outer_width > TYPE_PRECISION (type)) 11606 outer_width = TYPE_PRECISION (type); 11607 11608 if (outer_width > HOST_BITS_PER_WIDE_INT) 11609 { 11610 mask_hi = ((unsigned HOST_WIDE_INT) -1 11611 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width)); 11612 mask_lo = -1; 11613 } 11614 else 11615 { 11616 mask_hi = 0; 11617 mask_lo = ((unsigned HOST_WIDE_INT) -1 11618 >> (HOST_BITS_PER_WIDE_INT - outer_width)); 11619 } 11620 if (inner_width > HOST_BITS_PER_WIDE_INT) 11621 { 11622 mask_hi &= ~((unsigned HOST_WIDE_INT) -1 11623 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 11624 mask_lo = 0; 11625 } 11626 else 11627 mask_lo &= ~((unsigned HOST_WIDE_INT) -1 11628 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 11629 11630 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi 11631 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo) 11632 { 11633 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem)); 11634 tem = fold_convert (tem_type, tem); 11635 } 11636 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0 11637 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0) 11638 { 11639 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem)); 11640 tem = fold_convert (tem_type, tem); 11641 } 11642 else 11643 tem = NULL; 11644 } 11645 11646 if (tem) 11647 return fold_convert (type, 11648 fold_build2 (BIT_AND_EXPR, 11649 TREE_TYPE (tem), tem, 11650 fold_convert (TREE_TYPE (tem), 11651 arg1))); 11652 } 11653 11654 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was 11655 already handled above. 
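For example, with A == 10 (binary 1010) and N == 3, (A >> 3) & 1 is 1 so the result is 1 << 3 == 8, which equals A & 8; with N == 2 both sides are 0.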
*/ 11656 if (TREE_CODE (arg0) == BIT_AND_EXPR 11657 && integer_onep (TREE_OPERAND (arg0, 1)) 11658 && integer_zerop (op2) 11659 && integer_pow2p (arg1)) 11660 { 11661 tree tem = TREE_OPERAND (arg0, 0); 11662 STRIP_NOPS (tem); 11663 if (TREE_CODE (tem) == RSHIFT_EXPR 11664 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST 11665 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) == 11666 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))) 11667 return fold_build2 (BIT_AND_EXPR, type, 11668 TREE_OPERAND (tem, 0), arg1); 11669 } 11670 11671 /* A & N ? N : 0 is simply A & N if N is a power of two. This 11672 is probably obsolete because the first operand should be a 11673 truth value (that's why we have the two cases above), but let's 11674 leave it in until we can confirm this for all front-ends. */ 11675 if (integer_zerop (op2) 11676 && TREE_CODE (arg0) == NE_EXPR 11677 && integer_zerop (TREE_OPERAND (arg0, 1)) 11678 && integer_pow2p (arg1) 11679 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 11680 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 11681 arg1, OEP_ONLY_CONST)) 11682 return pedantic_non_lvalue (fold_convert (type, 11683 TREE_OPERAND (arg0, 0))); 11684 11685 /* Convert A ? B : 0 into A && B if A and B are truth values. */ 11686 if (integer_zerop (op2) 11687 && truth_value_p (TREE_CODE (arg0)) 11688 && truth_value_p (TREE_CODE (arg1))) 11689 return fold_build2 (TRUTH_ANDIF_EXPR, type, 11690 fold_convert (type, arg0), 11691 arg1); 11692 11693 /* Convert A ? B : 1 into !A || B if A and B are truth values. */ 11694 if (integer_onep (op2) 11695 && truth_value_p (TREE_CODE (arg0)) 11696 && truth_value_p (TREE_CODE (arg1))) 11697 { 11698 /* Only perform transformation if ARG0 is easily inverted. */ 11699 tem = fold_truth_not_expr (arg0); 11700 if (tem) 11701 return fold_build2 (TRUTH_ORIF_EXPR, type, 11702 fold_convert (type, tem), 11703 arg1); 11704 } 11705 11706 /* Convert A ? 0 : B into !A && B if A and B are truth values. */ 11707 if (integer_zerop (arg1) 11708 && truth_value_p (TREE_CODE (arg0)) 11709 && truth_value_p (TREE_CODE (op2))) 11710 { 11711 /* Only perform transformation if ARG0 is easily inverted. */ 11712 tem = fold_truth_not_expr (arg0); 11713 if (tem) 11714 return fold_build2 (TRUTH_ANDIF_EXPR, type, 11715 fold_convert (type, tem), 11716 op2); 11717 } 11718 11719 /* Convert A ? 1 : B into A || B if A and B are truth values. */ 11720 if (integer_onep (arg1) 11721 && truth_value_p (TREE_CODE (arg0)) 11722 && truth_value_p (TREE_CODE (op2))) 11723 return fold_build2 (TRUTH_ORIF_EXPR, type, 11724 fold_convert (type, arg0), 11725 op2); 11726 11727 return NULL_TREE; 11728 11729 case CALL_EXPR: 11730 /* Check for a built-in function. 
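For instance, a call to the built-in strlen whose argument is a string literal is recognized here and handed to fold_builtin, which can reduce it to the constant length.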
*/ 11731 if (TREE_CODE (op0) == ADDR_EXPR 11732 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL 11733 && DECL_BUILT_IN (TREE_OPERAND (op0, 0))) 11734 return fold_builtin (TREE_OPERAND (op0, 0), op1, false); 11735 return NULL_TREE; 11736 11737 case BIT_FIELD_REF: 11738 if (TREE_CODE (arg0) == VECTOR_CST 11739 && type == TREE_TYPE (TREE_TYPE (arg0)) 11740 && host_integerp (arg1, 1) 11741 && host_integerp (op2, 1)) 11742 { 11743 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1); 11744 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1); 11745 11746 if (width != 0 11747 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1 11748 && (idx % width) == 0 11749 && (idx = idx / width) 11750 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) 11751 { 11752 tree elements = TREE_VECTOR_CST_ELTS (arg0); 11753 while (idx-- > 0 && elements) 11754 elements = TREE_CHAIN (elements); 11755 if (elements) 11756 return TREE_VALUE (elements); 11757 else 11758 return fold_convert (type, integer_zero_node); 11759 } 11760 } 11761 return NULL_TREE; 11762 11763 default: 11764 return NULL_TREE; 11765 } /* switch (code) */ 11766} 11767 11768/* Perform constant folding and related simplification of EXPR. 11769 The related simplifications include x*1 => x, x*0 => 0, etc., 11770 and application of the associative law. 11771 NOP_EXPR conversions may be removed freely (as long as we 11772 are careful not to change the type of the overall expression). 11773 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR, 11774 but we can constant-fold them if they have constant operands. */ 11775 11776#ifdef ENABLE_FOLD_CHECKING 11777# define fold(x) fold_1 (x) 11778static tree fold_1 (tree); 11779static 11780#endif 11781tree 11782fold (tree expr) 11783{ 11784 const tree t = expr; 11785 enum tree_code code = TREE_CODE (t); 11786 enum tree_code_class kind = TREE_CODE_CLASS (code); 11787 tree tem; 11788 11789 /* Return right away if a constant. */ 11790 if (kind == tcc_constant) 11791 return t; 11792 11793 if (IS_EXPR_CODE_CLASS (kind)) 11794 { 11795 tree type = TREE_TYPE (t); 11796 tree op0, op1, op2; 11797 11798 switch (TREE_CODE_LENGTH (code)) 11799 { 11800 case 1: 11801 op0 = TREE_OPERAND (t, 0); 11802 tem = fold_unary (code, type, op0); 11803 return tem ? tem : expr; 11804 case 2: 11805 op0 = TREE_OPERAND (t, 0); 11806 op1 = TREE_OPERAND (t, 1); 11807 tem = fold_binary (code, type, op0, op1); 11808 return tem ? tem : expr; 11809 case 3: 11810 op0 = TREE_OPERAND (t, 0); 11811 op1 = TREE_OPERAND (t, 1); 11812 op2 = TREE_OPERAND (t, 2); 11813 tem = fold_ternary (code, type, op0, op1, op2); 11814 return tem ? tem : expr; 11815 default: 11816 break; 11817 } 11818 } 11819 11820 switch (code) 11821 { 11822 case CONST_DECL: 11823 return fold (DECL_INITIAL (t)); 11824 11825 default: 11826 return t; 11827 } /* switch (code) */ 11828} 11829 11830#ifdef ENABLE_FOLD_CHECKING 11831#undef fold 11832 11833static void fold_checksum_tree (tree, struct md5_ctx *, htab_t); 11834static void fold_check_failed (tree, tree); 11835void print_fold_checksum (tree); 11836 11837/* When --enable-checking=fold, compute a digest of expr before 11838 and after actual fold call to see if fold did not accidentally 11839 change original expr. 
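The digest is computed by fold_checksum_tree, which deliberately skips the few fields fold is allowed to modify, such as DECL_ASSEMBLER_NAME and the cached values of types.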
*/ 11840 11841tree 11842fold (tree expr) 11843{ 11844 tree ret; 11845 struct md5_ctx ctx; 11846 unsigned char checksum_before[16], checksum_after[16]; 11847 htab_t ht; 11848 11849 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 11850 md5_init_ctx (&ctx); 11851 fold_checksum_tree (expr, &ctx, ht); 11852 md5_finish_ctx (&ctx, checksum_before); 11853 htab_empty (ht); 11854 11855 ret = fold_1 (expr); 11856 11857 md5_init_ctx (&ctx); 11858 fold_checksum_tree (expr, &ctx, ht); 11859 md5_finish_ctx (&ctx, checksum_after); 11860 htab_delete (ht); 11861 11862 if (memcmp (checksum_before, checksum_after, 16)) 11863 fold_check_failed (expr, ret); 11864 11865 return ret; 11866} 11867 11868void 11869print_fold_checksum (tree expr) 11870{ 11871 struct md5_ctx ctx; 11872 unsigned char checksum[16], cnt; 11873 htab_t ht; 11874 11875 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 11876 md5_init_ctx (&ctx); 11877 fold_checksum_tree (expr, &ctx, ht); 11878 md5_finish_ctx (&ctx, checksum); 11879 htab_delete (ht); 11880 for (cnt = 0; cnt < 16; ++cnt) 11881 fprintf (stderr, "%02x", checksum[cnt]); 11882 putc ('\n', stderr); 11883} 11884 11885static void 11886fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED) 11887{ 11888 internal_error ("fold check: original tree changed by fold"); 11889} 11890 11891static void 11892fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) 11893{ 11894 void **slot; 11895 enum tree_code code; 11896 struct tree_function_decl buf; 11897 int i, len; 11898 11899recursive_label: 11900 11901 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree) 11902 <= sizeof (struct tree_function_decl)) 11903 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl)); 11904 if (expr == NULL) 11905 return; 11906 slot = htab_find_slot (ht, expr, INSERT); 11907 if (*slot != NULL) 11908 return; 11909 *slot = expr; 11910 code = TREE_CODE (expr); 11911 if (TREE_CODE_CLASS (code) == tcc_declaration 11912 && DECL_ASSEMBLER_NAME_SET_P (expr)) 11913 { 11914 /* Allow DECL_ASSEMBLER_NAME to be modified. */ 11915 memcpy ((char *) &buf, expr, tree_size (expr)); 11916 expr = (tree) &buf; 11917 SET_DECL_ASSEMBLER_NAME (expr, NULL); 11918 } 11919 else if (TREE_CODE_CLASS (code) == tcc_type 11920 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr) 11921 || TYPE_CACHED_VALUES_P (expr) 11922 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr))) 11923 { 11924 /* Allow these fields to be modified. 
*/ 11925 memcpy ((char *) &buf, expr, tree_size (expr)); 11926 expr = (tree) &buf; 11927 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0; 11928 TYPE_POINTER_TO (expr) = NULL; 11929 TYPE_REFERENCE_TO (expr) = NULL; 11930 if (TYPE_CACHED_VALUES_P (expr)) 11931 { 11932 TYPE_CACHED_VALUES_P (expr) = 0; 11933 TYPE_CACHED_VALUES (expr) = NULL; 11934 } 11935 } 11936 md5_process_bytes (expr, tree_size (expr), ctx); 11937 fold_checksum_tree (TREE_TYPE (expr), ctx, ht); 11938 if (TREE_CODE_CLASS (code) != tcc_type 11939 && TREE_CODE_CLASS (code) != tcc_declaration 11940 && code != TREE_LIST) 11941 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht); 11942 switch (TREE_CODE_CLASS (code)) 11943 { 11944 case tcc_constant: 11945 switch (code) 11946 { 11947 case STRING_CST: 11948 md5_process_bytes (TREE_STRING_POINTER (expr), 11949 TREE_STRING_LENGTH (expr), ctx); 11950 break; 11951 case COMPLEX_CST: 11952 fold_checksum_tree (TREE_REALPART (expr), ctx, ht); 11953 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht); 11954 break; 11955 case VECTOR_CST: 11956 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht); 11957 break; 11958 default: 11959 break; 11960 } 11961 break; 11962 case tcc_exceptional: 11963 switch (code) 11964 { 11965 case TREE_LIST: 11966 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht); 11967 fold_checksum_tree (TREE_VALUE (expr), ctx, ht); 11968 expr = TREE_CHAIN (expr); 11969 goto recursive_label; 11970 break; 11971 case TREE_VEC: 11972 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i) 11973 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht); 11974 break; 11975 default: 11976 break; 11977 } 11978 break; 11979 case tcc_expression: 11980 case tcc_reference: 11981 case tcc_comparison: 11982 case tcc_unary: 11983 case tcc_binary: 11984 case tcc_statement: 11985 len = TREE_CODE_LENGTH (code); 11986 for (i = 0; i < len; ++i) 11987 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht); 11988 break; 11989 case tcc_declaration: 11990 fold_checksum_tree (DECL_NAME (expr), ctx, ht); 11991 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht); 11992 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON)) 11993 { 11994 fold_checksum_tree (DECL_SIZE (expr), ctx, ht); 11995 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht); 11996 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht); 11997 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht); 11998 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht); 11999 } 12000 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS)) 12001 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht); 12002 12003 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON)) 12004 { 12005 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht); 12006 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht); 12007 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht); 12008 } 12009 break; 12010 case tcc_type: 12011 if (TREE_CODE (expr) == ENUMERAL_TYPE) 12012 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht); 12013 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht); 12014 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht); 12015 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht); 12016 fold_checksum_tree (TYPE_NAME (expr), ctx, ht); 12017 if (INTEGRAL_TYPE_P (expr) 12018 || SCALAR_FLOAT_TYPE_P (expr)) 12019 { 12020 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht); 12021 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht); 12022 } 12023 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht); 12024 if (TREE_CODE (expr) == RECORD_TYPE 12025 || TREE_CODE (expr) == UNION_TYPE 
12026 || TREE_CODE (expr) == QUAL_UNION_TYPE) 12027 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht); 12028 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht); 12029 break; 12030 default: 12031 break; 12032 } 12033} 12034 12035#endif 12036 12037/* Fold a unary tree expression with code CODE of type TYPE with an 12038 operand OP0. Return a folded expression if successful. Otherwise, 12039 return a tree expression with code CODE of type TYPE with an 12040 operand OP0. */ 12041 12042tree 12043fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL) 12044{ 12045 tree tem; 12046#ifdef ENABLE_FOLD_CHECKING 12047 unsigned char checksum_before[16], checksum_after[16]; 12048 struct md5_ctx ctx; 12049 htab_t ht; 12050 12051 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 12052 md5_init_ctx (&ctx); 12053 fold_checksum_tree (op0, &ctx, ht); 12054 md5_finish_ctx (&ctx, checksum_before); 12055 htab_empty (ht); 12056#endif 12057 12058 tem = fold_unary (code, type, op0); 12059 if (!tem) 12060 tem = build1_stat (code, type, op0 PASS_MEM_STAT); 12061 12062#ifdef ENABLE_FOLD_CHECKING 12063 md5_init_ctx (&ctx); 12064 fold_checksum_tree (op0, &ctx, ht); 12065 md5_finish_ctx (&ctx, checksum_after); 12066 htab_delete (ht); 12067 12068 if (memcmp (checksum_before, checksum_after, 16)) 12069 fold_check_failed (op0, tem); 12070#endif 12071 return tem; 12072} 12073 12074/* Fold a binary tree expression with code CODE of type TYPE with 12075 operands OP0 and OP1. Return a folded expression if successful. 12076 Otherwise, return a tree expression with code CODE of type TYPE 12077 with operands OP0 and OP1. */ 12078 12079tree 12080fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 12081 MEM_STAT_DECL) 12082{ 12083 tree tem; 12084#ifdef ENABLE_FOLD_CHECKING 12085 unsigned char checksum_before_op0[16], 12086 checksum_before_op1[16], 12087 checksum_after_op0[16], 12088 checksum_after_op1[16]; 12089 struct md5_ctx ctx; 12090 htab_t ht; 12091 12092 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 12093 md5_init_ctx (&ctx); 12094 fold_checksum_tree (op0, &ctx, ht); 12095 md5_finish_ctx (&ctx, checksum_before_op0); 12096 htab_empty (ht); 12097 12098 md5_init_ctx (&ctx); 12099 fold_checksum_tree (op1, &ctx, ht); 12100 md5_finish_ctx (&ctx, checksum_before_op1); 12101 htab_empty (ht); 12102#endif 12103 12104 tem = fold_binary (code, type, op0, op1); 12105 if (!tem) 12106 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); 12107 12108#ifdef ENABLE_FOLD_CHECKING 12109 md5_init_ctx (&ctx); 12110 fold_checksum_tree (op0, &ctx, ht); 12111 md5_finish_ctx (&ctx, checksum_after_op0); 12112 htab_empty (ht); 12113 12114 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 12115 fold_check_failed (op0, tem); 12116 12117 md5_init_ctx (&ctx); 12118 fold_checksum_tree (op1, &ctx, ht); 12119 md5_finish_ctx (&ctx, checksum_after_op1); 12120 htab_delete (ht); 12121 12122 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 12123 fold_check_failed (op1, tem); 12124#endif 12125 return tem; 12126} 12127 12128/* Fold a ternary tree expression with code CODE of type TYPE with 12129 operands OP0, OP1, and OP2. Return a folded expression if 12130 successful. Otherwise, return a tree expression with code CODE of 12131 type TYPE with operands OP0, OP1, and OP2. 
*/ 12132 12133tree 12134fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 12135 MEM_STAT_DECL) 12136{ 12137 tree tem; 12138#ifdef ENABLE_FOLD_CHECKING 12139 unsigned char checksum_before_op0[16], 12140 checksum_before_op1[16], 12141 checksum_before_op2[16], 12142 checksum_after_op0[16], 12143 checksum_after_op1[16], 12144 checksum_after_op2[16]; 12145 struct md5_ctx ctx; 12146 htab_t ht; 12147 12148 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 12149 md5_init_ctx (&ctx); 12150 fold_checksum_tree (op0, &ctx, ht); 12151 md5_finish_ctx (&ctx, checksum_before_op0); 12152 htab_empty (ht); 12153 12154 md5_init_ctx (&ctx); 12155 fold_checksum_tree (op1, &ctx, ht); 12156 md5_finish_ctx (&ctx, checksum_before_op1); 12157 htab_empty (ht); 12158 12159 md5_init_ctx (&ctx); 12160 fold_checksum_tree (op2, &ctx, ht); 12161 md5_finish_ctx (&ctx, checksum_before_op2); 12162 htab_empty (ht); 12163#endif 12164 12165 tem = fold_ternary (code, type, op0, op1, op2); 12166 if (!tem) 12167 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); 12168 12169#ifdef ENABLE_FOLD_CHECKING 12170 md5_init_ctx (&ctx); 12171 fold_checksum_tree (op0, &ctx, ht); 12172 md5_finish_ctx (&ctx, checksum_after_op0); 12173 htab_empty (ht); 12174 12175 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 12176 fold_check_failed (op0, tem); 12177 12178 md5_init_ctx (&ctx); 12179 fold_checksum_tree (op1, &ctx, ht); 12180 md5_finish_ctx (&ctx, checksum_after_op1); 12181 htab_empty (ht); 12182 12183 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 12184 fold_check_failed (op1, tem); 12185 12186 md5_init_ctx (&ctx); 12187 fold_checksum_tree (op2, &ctx, ht); 12188 md5_finish_ctx (&ctx, checksum_after_op2); 12189 htab_delete (ht); 12190 12191 if (memcmp (checksum_before_op2, checksum_after_op2, 16)) 12192 fold_check_failed (op2, tem); 12193#endif 12194 return tem; 12195} 12196 12197/* Perform constant folding and related simplification of initializer 12198 expression EXPR. These behave identically to "fold_buildN" but ignore 12199 potential run-time traps and exceptions that fold must preserve. 
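These work by temporarily clearing flag_trapv, flag_trapping_math and the related flags (see START_FOLD_INIT below), so that, for example, arithmetic that would otherwise have to be left for run time under -ftrapv or -ftrapping-math can still be evaluated when it appears in a static initializer.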
*/

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.
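         For example, (X & -8) is always a multiple of 8, because the
         mask -8 is itself a multiple of 8.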
*/ 12314 if (!integer_pow2p (bottom)) 12315 return 0; 12316 /* FALLTHRU */ 12317 12318 case MULT_EXPR: 12319 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 12320 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 12321 12322 case PLUS_EXPR: 12323 case MINUS_EXPR: 12324 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 12325 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 12326 12327 case LSHIFT_EXPR: 12328 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST) 12329 { 12330 tree op1, t1; 12331 12332 op1 = TREE_OPERAND (top, 1); 12333 /* const_binop may not detect overflow correctly, 12334 so check for it explicitly here. */ 12335 if (TYPE_PRECISION (TREE_TYPE (size_one_node)) 12336 > TREE_INT_CST_LOW (op1) 12337 && TREE_INT_CST_HIGH (op1) == 0 12338 && 0 != (t1 = fold_convert (type, 12339 const_binop (LSHIFT_EXPR, 12340 size_one_node, 12341 op1, 0))) 12342 && ! TREE_OVERFLOW (t1)) 12343 return multiple_of_p (type, t1, bottom); 12344 } 12345 return 0; 12346 12347 case NOP_EXPR: 12348 /* Can't handle conversions from non-integral or wider integral type. */ 12349 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE) 12350 || (TYPE_PRECISION (type) 12351 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0))))) 12352 return 0; 12353 12354 /* .. fall through ... */ 12355 12356 case SAVE_EXPR: 12357 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); 12358 12359 case INTEGER_CST: 12360 if (TREE_CODE (bottom) != INTEGER_CST 12361 || (TYPE_UNSIGNED (type) 12362 && (tree_int_cst_sgn (top) < 0 12363 || tree_int_cst_sgn (bottom) < 0))) 12364 return 0; 12365 return integer_zerop (const_binop (TRUNC_MOD_EXPR, 12366 top, bottom, 0)); 12367 12368 default: 12369 return 0; 12370 } 12371} 12372 12373/* Return true if `t' is known to be non-negative. If the return 12374 value is based on the assumption that signed overflow is undefined, 12375 set *STRICT_OVERFLOW_P to true; otherwise, don't change 12376 *STRICT_OVERFLOW_P. */ 12377 12378int 12379tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 12380{ 12381 if (t == error_mark_node) 12382 return 0; 12383 12384 if (TYPE_UNSIGNED (TREE_TYPE (t))) 12385 return 1; 12386 12387 switch (TREE_CODE (t)) 12388 { 12389 case SSA_NAME: 12390 /* Query VRP to see if it has recorded any information about 12391 the range of this object. */ 12392 return ssa_name_nonnegative_p (t); 12393 12394 case ABS_EXPR: 12395 /* We can't return 1 if flag_wrapv is set because 12396 ABS_EXPR<INT_MIN> = INT_MIN. */ 12397 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))) 12398 return 1; 12399 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) 12400 { 12401 *strict_overflow_p = true; 12402 return 1; 12403 } 12404 break; 12405 12406 case INTEGER_CST: 12407 return tree_int_cst_sgn (t) >= 0; 12408 12409 case REAL_CST: 12410 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 12411 12412 case PLUS_EXPR: 12413 if (FLOAT_TYPE_P (TREE_TYPE (t))) 12414 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12415 strict_overflow_p) 12416 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12417 strict_overflow_p)); 12418 12419 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are 12420 both unsigned and at least 2 bits shorter than the result. 
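	 For example, two 8-bit unsigned values zero-extended to a 32-bit
	 int sum to at most 510, which needs only 9 bits, so the sign bit
	 of the result can never be set.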
*/ 12421 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE 12422 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR 12423 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR) 12424 { 12425 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)); 12426 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)); 12427 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1) 12428 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2)) 12429 { 12430 unsigned int prec = MAX (TYPE_PRECISION (inner1), 12431 TYPE_PRECISION (inner2)) + 1; 12432 return prec < TYPE_PRECISION (TREE_TYPE (t)); 12433 } 12434 } 12435 break; 12436 12437 case MULT_EXPR: 12438 if (FLOAT_TYPE_P (TREE_TYPE (t))) 12439 { 12440 /* x * x for floating point x is always non-negative. */ 12441 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0)) 12442 return 1; 12443 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12444 strict_overflow_p) 12445 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12446 strict_overflow_p)); 12447 } 12448 12449 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are 12450 both unsigned and their total bits is shorter than the result. */ 12451 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE 12452 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR 12453 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR) 12454 { 12455 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)); 12456 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)); 12457 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1) 12458 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2)) 12459 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2) 12460 < TYPE_PRECISION (TREE_TYPE (t)); 12461 } 12462 return 0; 12463 12464 case BIT_AND_EXPR: 12465 case MAX_EXPR: 12466 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12467 strict_overflow_p) 12468 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12469 strict_overflow_p)); 12470 12471 case BIT_IOR_EXPR: 12472 case BIT_XOR_EXPR: 12473 case MIN_EXPR: 12474 case RDIV_EXPR: 12475 case TRUNC_DIV_EXPR: 12476 case CEIL_DIV_EXPR: 12477 case FLOOR_DIV_EXPR: 12478 case ROUND_DIV_EXPR: 12479 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12480 strict_overflow_p) 12481 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12482 strict_overflow_p)); 12483 12484 case TRUNC_MOD_EXPR: 12485 case CEIL_MOD_EXPR: 12486 case FLOOR_MOD_EXPR: 12487 case ROUND_MOD_EXPR: 12488 case SAVE_EXPR: 12489 case NON_LVALUE_EXPR: 12490 case FLOAT_EXPR: 12491 case FIX_TRUNC_EXPR: 12492 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12493 strict_overflow_p); 12494 12495 case COMPOUND_EXPR: 12496 case MODIFY_EXPR: 12497 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12498 strict_overflow_p); 12499 12500 case BIND_EXPR: 12501 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)), 12502 strict_overflow_p); 12503 12504 case COND_EXPR: 12505 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12506 strict_overflow_p) 12507 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2), 12508 strict_overflow_p)); 12509 12510 case NOP_EXPR: 12511 { 12512 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0)); 12513 tree outer_type = TREE_TYPE (t); 12514 12515 if (TREE_CODE (outer_type) == REAL_TYPE) 12516 { 12517 if (TREE_CODE (inner_type) == REAL_TYPE) 12518 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12519 strict_overflow_p); 12520 if (TREE_CODE (inner_type) == 
INTEGER_TYPE) 12521 { 12522 if (TYPE_UNSIGNED (inner_type)) 12523 return 1; 12524 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12525 strict_overflow_p); 12526 } 12527 } 12528 else if (TREE_CODE (outer_type) == INTEGER_TYPE) 12529 { 12530 if (TREE_CODE (inner_type) == REAL_TYPE) 12531 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0), 12532 strict_overflow_p); 12533 if (TREE_CODE (inner_type) == INTEGER_TYPE) 12534 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type) 12535 && TYPE_UNSIGNED (inner_type); 12536 } 12537 } 12538 break; 12539 12540 case TARGET_EXPR: 12541 { 12542 tree temp = TARGET_EXPR_SLOT (t); 12543 t = TARGET_EXPR_INITIAL (t); 12544 12545 /* If the initializer is non-void, then it's a normal expression 12546 that will be assigned to the slot. */ 12547 if (!VOID_TYPE_P (t)) 12548 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p); 12549 12550 /* Otherwise, the initializer sets the slot in some way. One common 12551 way is an assignment statement at the end of the initializer. */ 12552 while (1) 12553 { 12554 if (TREE_CODE (t) == BIND_EXPR) 12555 t = expr_last (BIND_EXPR_BODY (t)); 12556 else if (TREE_CODE (t) == TRY_FINALLY_EXPR 12557 || TREE_CODE (t) == TRY_CATCH_EXPR) 12558 t = expr_last (TREE_OPERAND (t, 0)); 12559 else if (TREE_CODE (t) == STATEMENT_LIST) 12560 t = expr_last (t); 12561 else 12562 break; 12563 } 12564 if (TREE_CODE (t) == MODIFY_EXPR 12565 && TREE_OPERAND (t, 0) == temp) 12566 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12567 strict_overflow_p); 12568 12569 return 0; 12570 } 12571 12572 case CALL_EXPR: 12573 { 12574 tree fndecl = get_callee_fndecl (t); 12575 tree arglist = TREE_OPERAND (t, 1); 12576 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 12577 switch (DECL_FUNCTION_CODE (fndecl)) 12578 { 12579 CASE_FLT_FN (BUILT_IN_ACOS): 12580 CASE_FLT_FN (BUILT_IN_ACOSH): 12581 CASE_FLT_FN (BUILT_IN_CABS): 12582 CASE_FLT_FN (BUILT_IN_COSH): 12583 CASE_FLT_FN (BUILT_IN_ERFC): 12584 CASE_FLT_FN (BUILT_IN_EXP): 12585 CASE_FLT_FN (BUILT_IN_EXP10): 12586 CASE_FLT_FN (BUILT_IN_EXP2): 12587 CASE_FLT_FN (BUILT_IN_FABS): 12588 CASE_FLT_FN (BUILT_IN_FDIM): 12589 CASE_FLT_FN (BUILT_IN_HYPOT): 12590 CASE_FLT_FN (BUILT_IN_POW10): 12591 CASE_INT_FN (BUILT_IN_FFS): 12592 CASE_INT_FN (BUILT_IN_PARITY): 12593 CASE_INT_FN (BUILT_IN_POPCOUNT): 12594 /* Always true. */ 12595 return 1; 12596 12597 CASE_FLT_FN (BUILT_IN_SQRT): 12598 /* sqrt(-0.0) is -0.0. 
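	    Thus, when signed zeros must be honored, the result is known
	    to be nonnegative only if the argument is.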
*/ 12599 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t)))) 12600 return 1; 12601 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist), 12602 strict_overflow_p); 12603 12604 CASE_FLT_FN (BUILT_IN_ASINH): 12605 CASE_FLT_FN (BUILT_IN_ATAN): 12606 CASE_FLT_FN (BUILT_IN_ATANH): 12607 CASE_FLT_FN (BUILT_IN_CBRT): 12608 CASE_FLT_FN (BUILT_IN_CEIL): 12609 CASE_FLT_FN (BUILT_IN_ERF): 12610 CASE_FLT_FN (BUILT_IN_EXPM1): 12611 CASE_FLT_FN (BUILT_IN_FLOOR): 12612 CASE_FLT_FN (BUILT_IN_FMOD): 12613 CASE_FLT_FN (BUILT_IN_FREXP): 12614 CASE_FLT_FN (BUILT_IN_LCEIL): 12615 CASE_FLT_FN (BUILT_IN_LDEXP): 12616 CASE_FLT_FN (BUILT_IN_LFLOOR): 12617 CASE_FLT_FN (BUILT_IN_LLCEIL): 12618 CASE_FLT_FN (BUILT_IN_LLFLOOR): 12619 CASE_FLT_FN (BUILT_IN_LLRINT): 12620 CASE_FLT_FN (BUILT_IN_LLROUND): 12621 CASE_FLT_FN (BUILT_IN_LRINT): 12622 CASE_FLT_FN (BUILT_IN_LROUND): 12623 CASE_FLT_FN (BUILT_IN_MODF): 12624 CASE_FLT_FN (BUILT_IN_NEARBYINT): 12625 CASE_FLT_FN (BUILT_IN_POW): 12626 CASE_FLT_FN (BUILT_IN_RINT): 12627 CASE_FLT_FN (BUILT_IN_ROUND): 12628 CASE_FLT_FN (BUILT_IN_SIGNBIT): 12629 CASE_FLT_FN (BUILT_IN_SINH): 12630 CASE_FLT_FN (BUILT_IN_TANH): 12631 CASE_FLT_FN (BUILT_IN_TRUNC): 12632 /* True if the 1st argument is nonnegative. */ 12633 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist), 12634 strict_overflow_p); 12635 12636 CASE_FLT_FN (BUILT_IN_FMAX): 12637 /* True if the 1st OR 2nd arguments are nonnegative. */ 12638 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist), 12639 strict_overflow_p) 12640 || (tree_expr_nonnegative_warnv_p 12641 (TREE_VALUE (TREE_CHAIN (arglist)), 12642 strict_overflow_p))); 12643 12644 CASE_FLT_FN (BUILT_IN_FMIN): 12645 /* True if the 1st AND 2nd arguments are nonnegative. */ 12646 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist), 12647 strict_overflow_p) 12648 && (tree_expr_nonnegative_warnv_p 12649 (TREE_VALUE (TREE_CHAIN (arglist)), 12650 strict_overflow_p))); 12651 12652 CASE_FLT_FN (BUILT_IN_COPYSIGN): 12653 /* True if the 2nd argument is nonnegative. */ 12654 return (tree_expr_nonnegative_warnv_p 12655 (TREE_VALUE (TREE_CHAIN (arglist)), 12656 strict_overflow_p)); 12657 12658 default: 12659 break; 12660 } 12661 } 12662 12663 /* ... fall through ... */ 12664 12665 default: 12666 { 12667 tree type = TREE_TYPE (t); 12668 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type)) 12669 && truth_value_p (TREE_CODE (t))) 12670 /* Truth values evaluate to 0 or 1, which is nonnegative unless we 12671 have a signed:1 type (where the value is -1 and 0). */ 12672 return true; 12673 } 12674 } 12675 12676 /* We don't know sign of `t', so be conservative and return false. */ 12677 return 0; 12678} 12679 12680/* Return true if `t' is known to be non-negative. Handle warnings 12681 about undefined signed overflow. */ 12682 12683int 12684tree_expr_nonnegative_p (tree t) 12685{ 12686 int ret; 12687 bool strict_overflow_p; 12688 12689 strict_overflow_p = false; 12690 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p); 12691 if (strict_overflow_p) 12692 fold_overflow_warning (("assuming signed overflow does not occur when " 12693 "determining that expression is always " 12694 "non-negative"), 12695 WARN_STRICT_OVERFLOW_MISC); 12696 return ret; 12697} 12698 12699/* Return true when T is an address and is known to be nonzero. 12700 For floating point we further ensure that T is not denormal. 12701 Similar logic is present in nonzero_address in rtlanal.h. 
12702 12703 If the return value is based on the assumption that signed overflow 12704 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 12705 change *STRICT_OVERFLOW_P. */ 12706 12707bool 12708tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p) 12709{ 12710 tree type = TREE_TYPE (t); 12711 bool sub_strict_overflow_p; 12712 12713 /* Doing something useful for floating point would need more work. */ 12714 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) 12715 return false; 12716 12717 switch (TREE_CODE (t)) 12718 { 12719 case SSA_NAME: 12720 /* Query VRP to see if it has recorded any information about 12721 the range of this object. */ 12722 return ssa_name_nonzero_p (t); 12723 12724 case ABS_EXPR: 12725 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12726 strict_overflow_p); 12727 12728 case INTEGER_CST: 12729 /* We used to test for !integer_zerop here. This does not work correctly 12730 if TREE_CONSTANT_OVERFLOW (t). */ 12731 return (TREE_INT_CST_LOW (t) != 0 12732 || TREE_INT_CST_HIGH (t) != 0); 12733 12734 case PLUS_EXPR: 12735 if (TYPE_OVERFLOW_UNDEFINED (type)) 12736 { 12737 /* With the presence of negative values it is hard 12738 to say something. */ 12739 sub_strict_overflow_p = false; 12740 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12741 &sub_strict_overflow_p) 12742 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12743 &sub_strict_overflow_p)) 12744 return false; 12745 /* One of operands must be positive and the other non-negative. */ 12746 /* We don't set *STRICT_OVERFLOW_P here: even if this value 12747 overflows, on a twos-complement machine the sum of two 12748 nonnegative numbers can never be zero. */ 12749 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12750 strict_overflow_p) 12751 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12752 strict_overflow_p)); 12753 } 12754 break; 12755 12756 case MULT_EXPR: 12757 if (TYPE_OVERFLOW_UNDEFINED (type)) 12758 { 12759 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12760 strict_overflow_p) 12761 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12762 strict_overflow_p)) 12763 { 12764 *strict_overflow_p = true; 12765 return true; 12766 } 12767 } 12768 break; 12769 12770 case NOP_EXPR: 12771 { 12772 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0)); 12773 tree outer_type = TREE_TYPE (t); 12774 12775 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type) 12776 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12777 strict_overflow_p)); 12778 } 12779 break; 12780 12781 case ADDR_EXPR: 12782 { 12783 tree base = get_base_address (TREE_OPERAND (t, 0)); 12784 12785 if (!base) 12786 return false; 12787 12788 /* Weak declarations may link to NULL. */ 12789 if (VAR_OR_FUNCTION_DECL_P (base)) 12790 return !DECL_WEAK (base); 12791 12792 /* Constants are never weak. 
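	   For example, the address of a string literal is known to be
	   nonzero.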
*/ 12793 if (CONSTANT_CLASS_P (base)) 12794 return true; 12795 12796 return false; 12797 } 12798 12799 case COND_EXPR: 12800 sub_strict_overflow_p = false; 12801 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12802 &sub_strict_overflow_p) 12803 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2), 12804 &sub_strict_overflow_p)) 12805 { 12806 if (sub_strict_overflow_p) 12807 *strict_overflow_p = true; 12808 return true; 12809 } 12810 break; 12811 12812 case MIN_EXPR: 12813 sub_strict_overflow_p = false; 12814 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12815 &sub_strict_overflow_p) 12816 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12817 &sub_strict_overflow_p)) 12818 { 12819 if (sub_strict_overflow_p) 12820 *strict_overflow_p = true; 12821 } 12822 break; 12823 12824 case MAX_EXPR: 12825 sub_strict_overflow_p = false; 12826 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12827 &sub_strict_overflow_p)) 12828 { 12829 if (sub_strict_overflow_p) 12830 *strict_overflow_p = true; 12831 12832 /* When both operands are nonzero, then MAX must be too. */ 12833 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12834 strict_overflow_p)) 12835 return true; 12836 12837 /* MAX where operand 0 is positive is positive. */ 12838 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 12839 strict_overflow_p); 12840 } 12841 /* MAX where operand 1 is positive is positive. */ 12842 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12843 &sub_strict_overflow_p) 12844 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 12845 &sub_strict_overflow_p)) 12846 { 12847 if (sub_strict_overflow_p) 12848 *strict_overflow_p = true; 12849 return true; 12850 } 12851 break; 12852 12853 case COMPOUND_EXPR: 12854 case MODIFY_EXPR: 12855 case BIND_EXPR: 12856 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12857 strict_overflow_p); 12858 12859 case SAVE_EXPR: 12860 case NON_LVALUE_EXPR: 12861 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12862 strict_overflow_p); 12863 12864 case BIT_IOR_EXPR: 12865 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 12866 strict_overflow_p) 12867 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 12868 strict_overflow_p)); 12869 12870 case CALL_EXPR: 12871 return alloca_call_p (t); 12872 12873 default: 12874 break; 12875 } 12876 return false; 12877} 12878 12879/* Return true when T is an address and is known to be nonzero. 12880 Handle warnings about undefined signed overflow. */ 12881 12882bool 12883tree_expr_nonzero_p (tree t) 12884{ 12885 bool ret, strict_overflow_p; 12886 12887 strict_overflow_p = false; 12888 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p); 12889 if (strict_overflow_p) 12890 fold_overflow_warning (("assuming signed overflow does not occur when " 12891 "determining that expression is always " 12892 "non-zero"), 12893 WARN_STRICT_OVERFLOW_MISC); 12894 return ret; 12895} 12896 12897/* Given the components of a binary expression CODE, TYPE, OP0 and OP1, 12898 attempt to fold the expression to a constant without modifying TYPE, 12899 OP0 or OP1. 12900 12901 If the expression could be simplified to a constant, then return 12902 the constant. If the expression would not be simplified to a 12903 constant, then return NULL_TREE. */ 12904 12905tree 12906fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1) 12907{ 12908 tree tem = fold_binary (code, type, op0, op1); 12909 return (tem && TREE_CONSTANT (tem)) ? 
         tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,
	     (ARRAY+(INDEX-(unsigned char)1)) becomes
	     ((ARRAY+(-(unsigned char)1))+INDEX), which becomes
	     (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.
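   For a negative INTEGER_CST the value is negated and force_fit_type
   records any overflow (the absolute value of the most negative value of
   a signed type does not fit in that type).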
*/ 13020 13021tree 13022fold_abs_const (tree arg0, tree type) 13023{ 13024 tree t = NULL_TREE; 13025 13026 switch (TREE_CODE (arg0)) 13027 { 13028 case INTEGER_CST: 13029 /* If the value is unsigned, then the absolute value is 13030 the same as the ordinary value. */ 13031 if (TYPE_UNSIGNED (type)) 13032 t = arg0; 13033 /* Similarly, if the value is non-negative. */ 13034 else if (INT_CST_LT (integer_minus_one_node, arg0)) 13035 t = arg0; 13036 /* If the value is negative, then the absolute value is 13037 its negation. */ 13038 else 13039 { 13040 unsigned HOST_WIDE_INT low; 13041 HOST_WIDE_INT high; 13042 int overflow = neg_double (TREE_INT_CST_LOW (arg0), 13043 TREE_INT_CST_HIGH (arg0), 13044 &low, &high); 13045 t = build_int_cst_wide (type, low, high); 13046 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0), 13047 TREE_CONSTANT_OVERFLOW (arg0)); 13048 } 13049 break; 13050 13051 case REAL_CST: 13052 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) 13053 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0))); 13054 else 13055 t = arg0; 13056 break; 13057 13058 default: 13059 gcc_unreachable (); 13060 } 13061 13062 return t; 13063} 13064 13065/* Return the tree for not (ARG0) when ARG0 is known to be an integer 13066 constant. TYPE is the type of the result. */ 13067 13068static tree 13069fold_not_const (tree arg0, tree type) 13070{ 13071 tree t = NULL_TREE; 13072 13073 gcc_assert (TREE_CODE (arg0) == INTEGER_CST); 13074 13075 t = build_int_cst_wide (type, 13076 ~ TREE_INT_CST_LOW (arg0), 13077 ~ TREE_INT_CST_HIGH (arg0)); 13078 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0), 13079 TREE_CONSTANT_OVERFLOW (arg0)); 13080 13081 return t; 13082} 13083 13084/* Given CODE, a relational operator, the target type, TYPE and two 13085 constant operands OP0 and OP1, return the result of the 13086 relational operation. If the result is not a compile time 13087 constant, then return NULL_TREE. */ 13088 13089static tree 13090fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) 13091{ 13092 int result, invert; 13093 13094 /* From here on, the only cases we handle are when the result is 13095 known to be a constant. */ 13096 13097 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST) 13098 { 13099 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0); 13100 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1); 13101 13102 /* Handle the cases where either operand is a NaN. */ 13103 if (real_isnan (c0) || real_isnan (c1)) 13104 { 13105 switch (code) 13106 { 13107 case EQ_EXPR: 13108 case ORDERED_EXPR: 13109 result = 0; 13110 break; 13111 13112 case NE_EXPR: 13113 case UNORDERED_EXPR: 13114 case UNLT_EXPR: 13115 case UNLE_EXPR: 13116 case UNGT_EXPR: 13117 case UNGE_EXPR: 13118 case UNEQ_EXPR: 13119 result = 1; 13120 break; 13121 13122 case LT_EXPR: 13123 case LE_EXPR: 13124 case GT_EXPR: 13125 case GE_EXPR: 13126 case LTGT_EXPR: 13127 if (flag_trapping_math) 13128 return NULL_TREE; 13129 result = 0; 13130 break; 13131 13132 default: 13133 gcc_unreachable (); 13134 } 13135 13136 return constant_boolean_node (result, type); 13137 } 13138 13139 return constant_boolean_node (real_compare (code, c0, c1), type); 13140 } 13141 13142 /* Handle equality/inequality of complex constants. 
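     Two complex constants are equal iff both their real and imaginary
     parts are equal; only EQ_EXPR and NE_EXPR yield a constant result
     here.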
*/
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case in which it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return has side effects, and likewise for the right-hand side of the
     modify expression inside the return.  If either has no side effects, we
     don't need to wrap the expression in a cleanup point expression.  Note
     that we don't check the left-hand side of the modify because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.
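     Any WITH_SIZE_EXPR wrapper is therefore stripped before the address
     is taken.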
*/ 13242 if (TREE_CODE (t) == WITH_SIZE_EXPR) 13243 t = TREE_OPERAND (t, 0); 13244 13245 /* Note: doesn't apply to ALIGN_INDIRECT_REF */ 13246 if (TREE_CODE (t) == INDIRECT_REF 13247 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF) 13248 { 13249 t = TREE_OPERAND (t, 0); 13250 if (TREE_TYPE (t) != ptrtype) 13251 t = build1 (NOP_EXPR, ptrtype, t); 13252 } 13253 else 13254 { 13255 tree base = t; 13256 13257 while (handled_component_p (base)) 13258 base = TREE_OPERAND (base, 0); 13259 if (DECL_P (base)) 13260 TREE_ADDRESSABLE (base) = 1; 13261 13262 t = build1 (ADDR_EXPR, ptrtype, t); 13263 } 13264 13265 return t; 13266} 13267 13268tree 13269build_fold_addr_expr (tree t) 13270{ 13271 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t))); 13272} 13273 13274/* Given a pointer value OP0 and a type TYPE, return a simplified version 13275 of an indirection through OP0, or NULL_TREE if no simplification is 13276 possible. */ 13277 13278tree 13279fold_indirect_ref_1 (tree type, tree op0) 13280{ 13281 tree sub = op0; 13282 tree subtype; 13283 13284 STRIP_NOPS (sub); 13285 subtype = TREE_TYPE (sub); 13286 if (!POINTER_TYPE_P (subtype)) 13287 return NULL_TREE; 13288 13289 if (TREE_CODE (sub) == ADDR_EXPR) 13290 { 13291 tree op = TREE_OPERAND (sub, 0); 13292 tree optype = TREE_TYPE (op); 13293 /* *&CONST_DECL -> to the value of the const decl. */ 13294 if (TREE_CODE (op) == CONST_DECL) 13295 return DECL_INITIAL (op); 13296 /* *&p => p; make sure to handle *&"str"[cst] here. */ 13297 if (type == optype) 13298 { 13299 tree fop = fold_read_from_constant_string (op); 13300 if (fop) 13301 return fop; 13302 else 13303 return op; 13304 } 13305 /* *(foo *)&fooarray => fooarray[0] */ 13306 else if (TREE_CODE (optype) == ARRAY_TYPE 13307 && type == TREE_TYPE (optype)) 13308 { 13309 tree type_domain = TYPE_DOMAIN (optype); 13310 tree min_val = size_zero_node; 13311 if (type_domain && TYPE_MIN_VALUE (type_domain)) 13312 min_val = TYPE_MIN_VALUE (type_domain); 13313 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); 13314 } 13315 /* *(foo *)&complexfoo => __real__ complexfoo */ 13316 else if (TREE_CODE (optype) == COMPLEX_TYPE 13317 && type == TREE_TYPE (optype)) 13318 return fold_build1 (REALPART_EXPR, type, op); 13319 } 13320 13321 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ 13322 if (TREE_CODE (sub) == PLUS_EXPR 13323 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) 13324 { 13325 tree op00 = TREE_OPERAND (sub, 0); 13326 tree op01 = TREE_OPERAND (sub, 1); 13327 tree op00type; 13328 13329 STRIP_NOPS (op00); 13330 op00type = TREE_TYPE (op00); 13331 if (TREE_CODE (op00) == ADDR_EXPR 13332 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE 13333 && type == TREE_TYPE (TREE_TYPE (op00type))) 13334 { 13335 tree size = TYPE_SIZE_UNIT (type); 13336 if (tree_int_cst_equal (size, op01)) 13337 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0)); 13338 } 13339 } 13340 13341 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ 13342 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE 13343 && type == TREE_TYPE (TREE_TYPE (subtype))) 13344 { 13345 tree type_domain; 13346 tree min_val = size_zero_node; 13347 sub = build_fold_indirect_ref (sub); 13348 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); 13349 if (type_domain && TYPE_MIN_VALUE (type_domain)) 13350 min_val = TYPE_MIN_VALUE (type_domain); 13351 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); 13352 } 13353 13354 return NULL_TREE; 13355} 13356 13357/* Builds an expression for an indirection through T, 
simplifying some 13358 cases. */ 13359 13360tree 13361build_fold_indirect_ref (tree t) 13362{ 13363 tree type = TREE_TYPE (TREE_TYPE (t)); 13364 tree sub = fold_indirect_ref_1 (type, t); 13365 13366 if (sub) 13367 return sub; 13368 else 13369 return build1 (INDIRECT_REF, type, t); 13370} 13371 13372/* Given an INDIRECT_REF T, return either T or a simplified version. */ 13373 13374tree 13375fold_indirect_ref (tree t) 13376{ 13377 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0)); 13378 13379 if (sub) 13380 return sub; 13381 else 13382 return t; 13383} 13384 13385/* Strip non-trapping, non-side-effecting tree nodes from an expression 13386 whose result is ignored. The type of the returned tree need not be 13387 the same as the original expression. */ 13388 13389tree 13390fold_ignored_result (tree t) 13391{ 13392 if (!TREE_SIDE_EFFECTS (t)) 13393 return integer_zero_node; 13394 13395 for (;;) 13396 switch (TREE_CODE_CLASS (TREE_CODE (t))) 13397 { 13398 case tcc_unary: 13399 t = TREE_OPERAND (t, 0); 13400 break; 13401 13402 case tcc_binary: 13403 case tcc_comparison: 13404 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))) 13405 t = TREE_OPERAND (t, 0); 13406 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))) 13407 t = TREE_OPERAND (t, 1); 13408 else 13409 return t; 13410 break; 13411 13412 case tcc_expression: 13413 switch (TREE_CODE (t)) 13414 { 13415 case COMPOUND_EXPR: 13416 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))) 13417 return t; 13418 t = TREE_OPERAND (t, 0); 13419 break; 13420 13421 case COND_EXPR: 13422 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)) 13423 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2))) 13424 return t; 13425 t = TREE_OPERAND (t, 0); 13426 break; 13427 13428 default: 13429 return t; 13430 } 13431 break; 13432 13433 default: 13434 return t; 13435 } 13436} 13437 13438/* Return the value of VALUE, rounded up to a multiple of DIVISOR. 13439 This can only be applied to objects of a sizetype. */ 13440 13441tree 13442round_up (tree value, int divisor) 13443{ 13444 tree div = NULL_TREE; 13445 13446 gcc_assert (divisor > 0); 13447 if (divisor == 1) 13448 return value; 13449 13450 /* See if VALUE is already a multiple of DIVISOR. If so, we don't 13451 have to do anything. Only do this when we are not given a const, 13452 because in that case, this check is more expensive than just 13453 doing it. */ 13454 if (TREE_CODE (value) != INTEGER_CST) 13455 { 13456 div = build_int_cst (TREE_TYPE (value), divisor); 13457 13458 if (multiple_of_p (TREE_TYPE (value), value, div)) 13459 return value; 13460 } 13461 13462 /* If divisor is a power of two, simplify this to bit manipulation. */ 13463 if (divisor == (divisor & -divisor)) 13464 { 13465 tree t; 13466 13467 t = build_int_cst (TREE_TYPE (value), divisor - 1); 13468 value = size_binop (PLUS_EXPR, value, t); 13469 t = build_int_cst (TREE_TYPE (value), -divisor); 13470 value = size_binop (BIT_AND_EXPR, value, t); 13471 } 13472 else 13473 { 13474 if (!div) 13475 div = build_int_cst (TREE_TYPE (value), divisor); 13476 value = size_binop (CEIL_DIV_EXPR, value, div); 13477 value = size_binop (MULT_EXPR, value, div); 13478 } 13479 13480 return value; 13481} 13482 13483/* Likewise, but round down. */ 13484 13485tree 13486round_down (tree value, int divisor) 13487{ 13488 tree div = NULL_TREE; 13489 13490 gcc_assert (divisor > 0); 13491 if (divisor == 1) 13492 return value; 13493 13494 /* See if VALUE is already a multiple of DIVISOR. If so, we don't 13495 have to do anything. 
Only do this when we are not given a const, 13496 because in that case, this check is more expensive than just 13497 doing it. */ 13498 if (TREE_CODE (value) != INTEGER_CST) 13499 { 13500 div = build_int_cst (TREE_TYPE (value), divisor); 13501 13502 if (multiple_of_p (TREE_TYPE (value), value, div)) 13503 return value; 13504 } 13505 13506 /* If divisor is a power of two, simplify this to bit manipulation. */ 13507 if (divisor == (divisor & -divisor)) 13508 { 13509 tree t; 13510 13511 t = build_int_cst (TREE_TYPE (value), -divisor); 13512 value = size_binop (BIT_AND_EXPR, value, t); 13513 } 13514 else 13515 { 13516 if (!div) 13517 div = build_int_cst (TREE_TYPE (value), divisor); 13518 value = size_binop (FLOOR_DIV_EXPR, value, div); 13519 value = size_binop (MULT_EXPR, value, div); 13520 } 13521 13522 return value; 13523} 13524 13525/* Returns the pointer to the base of the object addressed by EXP and 13526 extracts the information about the offset of the access, storing it 13527 to PBITPOS and POFFSET. */ 13528 13529static tree 13530split_address_to_core_and_offset (tree exp, 13531 HOST_WIDE_INT *pbitpos, tree *poffset) 13532{ 13533 tree core; 13534 enum machine_mode mode; 13535 int unsignedp, volatilep; 13536 HOST_WIDE_INT bitsize; 13537 13538 if (TREE_CODE (exp) == ADDR_EXPR) 13539 { 13540 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, 13541 poffset, &mode, &unsignedp, &volatilep, 13542 false); 13543 core = build_fold_addr_expr (core); 13544 } 13545 else 13546 { 13547 core = exp; 13548 *pbitpos = 0; 13549 *poffset = NULL_TREE; 13550 } 13551 13552 return core; 13553} 13554 13555/* Returns true if addresses of E1 and E2 differ by a constant, false 13556 otherwise. If they do, E1 - E2 is stored in *DIFF. */ 13557 13558bool 13559ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff) 13560{ 13561 tree core1, core2; 13562 HOST_WIDE_INT bitpos1, bitpos2; 13563 tree toffset1, toffset2, tdiff, type; 13564 13565 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1); 13566 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2); 13567 13568 if (bitpos1 % BITS_PER_UNIT != 0 13569 || bitpos2 % BITS_PER_UNIT != 0 13570 || !operand_equal_p (core1, core2, 0)) 13571 return false; 13572 13573 if (toffset1 && toffset2) 13574 { 13575 type = TREE_TYPE (toffset1); 13576 if (type != TREE_TYPE (toffset2)) 13577 toffset2 = fold_convert (type, toffset2); 13578 13579 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2); 13580 if (!cst_and_fits_in_hwi (tdiff)) 13581 return false; 13582 13583 *diff = int_cst_value (tdiff); 13584 } 13585 else if (toffset1 || toffset2) 13586 { 13587 /* If only one of the offsets is non-constant, the difference cannot 13588 be a constant. */ 13589 return false; 13590 } 13591 else 13592 *diff = 0; 13593 13594 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT; 13595 return true; 13596} 13597 13598/* Simplify the floating point expression EXP when the sign of the 13599 result is not significant. Return NULL_TREE if no simplification 13600 is possible. */ 13601 13602tree 13603fold_strip_sign_ops (tree exp) 13604{ 13605 tree arg0, arg1; 13606 13607 switch (TREE_CODE (exp)) 13608 { 13609 case ABS_EXPR: 13610 case NEGATE_EXPR: 13611 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0)); 13612 return arg0 ? 
arg0 : TREE_OPERAND (exp, 0); 13613 13614 case MULT_EXPR: 13615 case RDIV_EXPR: 13616 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp)))) 13617 return NULL_TREE; 13618 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0)); 13619 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); 13620 if (arg0 != NULL_TREE || arg1 != NULL_TREE) 13621 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp), 13622 arg0 ? arg0 : TREE_OPERAND (exp, 0), 13623 arg1 ? arg1 : TREE_OPERAND (exp, 1)); 13624 break; 13625 13626 default: 13627 break; 13628 } 13629 return NULL_TREE; 13630} 13631 13632
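/* Example of the effect of fold_strip_sign_ops: when the sign of the
   result is not significant and sign-dependent rounding is not in effect,
   -X * Y is simplified to X * Y, and ABS_EXPR <X> to X.  */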