/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

/* Forward declarations for the static helpers defined below.  */
static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.
   */

void
discard_pending_stack_adjust (void)
{
  /* Keep the running model of the stack pointer (stack_pointer_delta)
     consistent, since the pending adjustment will never be emitted.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Used internally by prefer_and_bit_test; cached (GC-rooted) scratch rtxes
   so the cost comparison does not allocate on every call.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  /* If a case needs a "fall through" label of its own, it is created
     here and emitted at the bottom of the function.  */
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      /* A constant condition selects one label statically; the other
         branch is simply never emitted.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa, so jump on the
         operand directly.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test, so swap
             which label means "bit set" and which "bit clear".  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  /* Rebuild the cheaper (ARG & (1 << SHIFT)) form and
                     jump on that instead.  */
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: just swap the two target labels.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        /* Test the condition; LABEL1 receives control when it is false,
           and each arm then jumps on its own value.  */
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        /* If the field is narrower than the expression's type and the
           target can compare in the narrow mode, do that instead.  */
        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex comparisons are lowered before this point.  */
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          /* `x == 0' is `!x': jump with the labels swapped.  */
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          /* `x != 0' is just `x' as a jump condition.  */
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          /* Note the swapped labels: parts-equality tests EQ, and
             swapping gives us NE.  */
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

      /* For the ordering comparisons below, wide integer modes without a
         direct compare insn go through do_jump_by_parts_greater, which
         only knows GT (SWAP == 0) and LT (SWAP == 1); LE and GE are
         obtained by also swapping the labels.  */
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          /* The reversed test needs reversed labels too.  */
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      /* Each case records the combined rtx comparison RCODE1 and the pair
         of tree comparisons (TCODE1 || TCODE2) to fall back on, then joins
         the shared expansion at unordered_bcc.  */
    case UNLT_EXPR:
      rcode1 = UNLT;
      tcode1 = UNORDERED_EXPR;
      tcode2 = LT_EXPR;
      goto unordered_bcc;
    case UNLE_EXPR:
      rcode1 = UNLE;
      tcode1 = UNORDERED_EXPR;
      tcode2 = LE_EXPR;
      goto unordered_bcc;
    case UNGT_EXPR:
      rcode1 = UNGT;
      tcode1 = UNORDERED_EXPR;
      tcode2 = GT_EXPR;
      goto unordered_bcc;
    case UNGE_EXPR:
      rcode1 = UNGE;
      tcode1 = UNORDERED_EXPR;
      tcode2 = GE_EXPR;
      goto unordered_bcc;
    case UNEQ_EXPR:
      rcode1 = UNEQ;
      tcode1 = UNORDERED_EXPR;
      tcode2 = EQ_EXPR;
      goto unordered_bcc;
    case LTGT_EXPR:
      /* It is ok for LTGT_EXPR to trap when the result is unordered,
         so expand to (a < b) || (a > b).  */
      rcode1 = LTGT;
      tcode1 = LT_EXPR;
      tcode2 = GT_EXPR;
      goto unordered_bcc;

    unordered_bcc:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (can_compare_p (rcode1, mode, ccp_jump))
        do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                             if_true_label);
      else
        {
          /* save_expr so the operands are only evaluated once across
             the two comparisons.  */
          tree op0 = save_expr (TREE_OPERAND (exp, 0));
          tree op1 = save_expr (TREE_OPERAND (exp, 1));
          tree cmp0, cmp1;

          /* If the target doesn't support combined unordered
             compares, decompose into two comparisons.  */
          if (if_true_label == 0)
            drop_through_label = if_true_label = gen_label_rtx ();

          cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
          cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
          do_jump (cmp0, 0, if_true_label);
          do_jump (cmp1, if_false_label, if_true_label);
        }
    }
    break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          /* No false label: create one so the first operand can skip the
             second when it is already false.  */
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          /* No true label: create one so the first operand can skip the
             second when it is already true.  */
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
            __builtin_expect (<test>, 0)  and
            __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }

      /* Fall through and generate the normal code.  */
    default:
    normal:
      /* Generic case: expand EXP to rtl and compare the result against
         zero.  */
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          /* The value is known at compile time; emit at most one
             unconditional jump.  */
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else
        {
          gcc_assert (GET_MODE (temp) != VOIDmode);

          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && REG_P (XEXP (temp, 0)))
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  /* SWAP selects which operand is expanded first, which turns the fixed
     GT test below into LT when SWAP is 1.  */
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal; otherwise (this
         word already makes OP0 less) the result is known false.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words compared equal, so OP0 is not greater.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal word sends control to IF_FALSE_LABEL; falling off the
     loop means all words matched.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      /* The OR succeeded: one compare of the accumulated word decides.  */
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared), and return an
   rtx describing the test; on cc0 targets the emitted compare sets CC0.
   MODE is the machine mode of the comparison, not of the result.
   The decision as to signed or unsigned comparison must be made by
   the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time, or at least simplify
     the operands.  */
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
          /* Continue with the simplified comparison instead.  */
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Reversal is unsafe for IEEE float because of NaNs.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          /* Comparison folded to a constant: emit at most one
             unconditional jump.  */
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      /* emit_cmp_and_jump_insns needs a real target; make one and emit
         it right after so the "true" case simply falls through.  */
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"