/* dojump.c revision 132718 */
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"

static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      /* Forget the adjustment, but keep stack_pointer_delta consistent
         with the (now dropped) pending adjustment.  */
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  /* Flush any queued (postincrement-style) insns before branching.  */
  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      /* Erroneous input: emit nothing.  */
      break;

    case INTEGER_CST:
      /* Constant condition: at most one unconditional jump.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
        = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* `!a': test A with the two labels swapped.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      /* `a && b': if A is false, short-circuit to the false label
         (materializing one to fall through to if the caller gave none),
         otherwise test B.  */
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      /* `a || b': mirror image of TRUTH_ANDIF_EXPR above.  */
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      /* `a, b': evaluate A for its side effects only, then jump on B.  */
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        /* If the field is narrower than the full value and a comparison
           in the narrow mode exists, test in the narrow mode.  */
        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex == complex decomposes into (re==re) && (im==im);
           save_expr keeps each operand from being expanded twice.  */
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex != complex decomposes into (re!=re) || (im!=im).  */
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          /* Note the swapped labels: equality-by-parts with swapped
             labels gives us not-equal.  */
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
    }
    break;

    /* Special case:
       __builtin_expect (<test>, 0) and
       __builtin_expect (<test>, 1)

       We need to do this here, so that <test> is not converted to a SCC
       operation on machines that use condition code registers and COMPARE
       like the PowerPC, and then the jump is done based on whether the SCC
       operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          /* Value is known at expand time: emit at most one plain jump.  */
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        {
          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && GET_CODE (XEXP (temp, 0)) == REG)
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  /* SWAP == 1 expands the operands in reversed order, turning the
     GT test done by do_jump_by_parts_greater_rtx into LT.  */
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
}
626 UNSIGNEDP says to do unsigned comparison. 627 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */ 628 629void 630do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0, 631 rtx op1, rtx if_false_label, rtx if_true_label) 632{ 633 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); 634 rtx drop_through_label = 0; 635 int i; 636 637 if (! if_true_label || ! if_false_label) 638 drop_through_label = gen_label_rtx (); 639 if (! if_true_label) 640 if_true_label = drop_through_label; 641 if (! if_false_label) 642 if_false_label = drop_through_label; 643 644 /* Compare a word at a time, high order first. */ 645 for (i = 0; i < nwords; i++) 646 { 647 rtx op0_word, op1_word; 648 649 if (WORDS_BIG_ENDIAN) 650 { 651 op0_word = operand_subword_force (op0, i, mode); 652 op1_word = operand_subword_force (op1, i, mode); 653 } 654 else 655 { 656 op0_word = operand_subword_force (op0, nwords - 1 - i, mode); 657 op1_word = operand_subword_force (op1, nwords - 1 - i, mode); 658 } 659 660 /* All but high-order word must be compared as unsigned. */ 661 do_compare_rtx_and_jump (op0_word, op1_word, GT, 662 (unsignedp || i > 0), word_mode, NULL_RTX, 663 NULL_RTX, if_true_label); 664 665 /* Consider lower words only if these are equal. */ 666 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode, 667 NULL_RTX, NULL_RTX, if_false_label); 668 } 669 670 if (if_false_label) 671 emit_jump (if_false_label); 672 if (drop_through_label) 673 emit_label (drop_through_label); 674} 675 676/* Given an EQ_EXPR expression EXP for values too wide to be compared 677 with one insn, test the comparison and jump to the appropriate label. 
*/ 678 679static void 680do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label) 681{ 682 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); 683 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); 684 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); 685 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); 686 int i; 687 rtx drop_through_label = 0; 688 689 if (! if_false_label) 690 drop_through_label = if_false_label = gen_label_rtx (); 691 692 for (i = 0; i < nwords; i++) 693 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode), 694 operand_subword_force (op1, i, mode), 695 EQ, TREE_UNSIGNED (TREE_TYPE (exp)), 696 word_mode, NULL_RTX, if_false_label, NULL_RTX); 697 698 if (if_true_label) 699 emit_jump (if_true_label); 700 if (drop_through_label) 701 emit_label (drop_through_label); 702} 703 704/* Jump according to whether OP0 is 0. 705 We assume that OP0 has an integer mode that is too wide 706 for the available compare insns. */ 707 708void 709do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label) 710{ 711 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; 712 rtx part; 713 int i; 714 rtx drop_through_label = 0; 715 716 /* The fastest way of doing this comparison on almost any machine is to 717 "or" all the words and compare the result. If all have to be loaded 718 from memory and this is a very wide item, it's possible this may 719 be slower, but that's highly unlikely. 
*/ 720 721 part = gen_reg_rtx (word_mode); 722 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0))); 723 for (i = 1; i < nwords && part != 0; i++) 724 part = expand_binop (word_mode, ior_optab, part, 725 operand_subword_force (op0, i, GET_MODE (op0)), 726 part, 1, OPTAB_WIDEN); 727 728 if (part != 0) 729 { 730 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode, 731 NULL_RTX, if_false_label, if_true_label); 732 733 return; 734 } 735 736 /* If we couldn't do the "or" simply, do this with a series of compares. */ 737 if (! if_false_label) 738 drop_through_label = if_false_label = gen_label_rtx (); 739 740 for (i = 0; i < nwords; i++) 741 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)), 742 const0_rtx, EQ, 1, word_mode, NULL_RTX, 743 if_false_label, NULL_RTX); 744 745 if (if_true_label) 746 emit_jump (if_true_label); 747 748 if (drop_through_label) 749 emit_label (drop_through_label); 750} 751 752/* Generate code for a comparison of OP0 and OP1 with rtx code CODE. 753 (including code to compute the values to be compared) 754 and set (CC0) according to the result. 755 The decision as to signed or unsigned comparison must be made by the caller. 756 757 We force a stack adjustment unless there are currently 758 things pushed on the stack that aren't yet used. 759 760 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being 761 compared. */ 762 763rtx 764compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp, 765 enum machine_mode mode, rtx size) 766{ 767 enum rtx_code ucode; 768 rtx tem; 769 770 /* If one operand is constant, make it the second one. Only do this 771 if the other operand is not constant as well. 
*/ 772 773 if (swap_commutative_operands_p (op0, op1)) 774 { 775 tem = op0; 776 op0 = op1; 777 op1 = tem; 778 code = swap_condition (code); 779 } 780 781 if (flag_force_mem) 782 { 783 op0 = force_not_mem (op0); 784 op1 = force_not_mem (op1); 785 } 786 787 do_pending_stack_adjust (); 788 789 ucode = unsignedp ? unsigned_condition (code) : code; 790 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0) 791 return tem; 792 793#if 0 794 /* There's no need to do this now that combine.c can eliminate lots of 795 sign extensions. This can be less efficient in certain cases on other 796 machines. */ 797 798 /* If this is a signed equality comparison, we can do it as an 799 unsigned comparison since zero-extension is cheaper than sign 800 extension and comparisons with zero are done as unsigned. This is 801 the case even on machines that can do fast sign extension, since 802 zero-extension is easier to combine with other operations than 803 sign-extension is. If we are comparing against a constant, we must 804 convert it to what it would look like unsigned. */ 805 if ((code == EQ || code == NE) && ! unsignedp 806 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) 807 { 808 if (GET_CODE (op1) == CONST_INT 809 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) 810 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); 811 unsignedp = 1; 812 } 813#endif 814 815 emit_cmp_insn (op0, op1, code, size, mode, unsignedp); 816 817#if HAVE_cc0 818 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); 819#else 820 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); 821#endif 822} 823 824/* Like do_compare_and_jump but expects the values to compare as two rtx's. 825 The decision as to signed or unsigned comparison must be made by the caller. 826 827 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being 828 compared. 
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  NOTE: not done for floating modes, where reversing a
     comparison is not generally valid (unordered operands).  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If the result is known at expand time, emit at most one plain jump.  */
  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  /* emit_cmp_and_jump_insns needs a true label; manufacture a dummy one
     immediately after the branch if the caller gave none.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}