1/* Expands front end tree to back end RTL for GCC 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 4 Free Software Foundation, Inc. 5 6This file is part of GCC. 7 8GCC is free software; you can redistribute it and/or modify it under 9the terms of the GNU General Public License as published by the Free 10Software Foundation; either version 2, or (at your option) any later 11version. 12 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY 14WARRANTY; without even the implied warranty of MERCHANTABILITY or 15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16for more details. 17 18You should have received a copy of the GNU General Public License 19along with GCC; see the file COPYING. If not, write to the Free 20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 2102110-1301, USA. */ 22 23/* This file handles the generation of rtl code from tree structure 24 above the level of expressions, using subroutines in exp*.c and emit-rtl.c. 25 The functions whose names start with `expand_' are called by the 26 expander to generate RTL instructions for various kinds of constructs. */ 27 28#include "config.h" 29#include "system.h" 30#include "coretypes.h" 31#include "tm.h" 32 33#include "rtl.h" 34#include "hard-reg-set.h" 35#include "tree.h" 36#include "tm_p.h" 37#include "flags.h" 38#include "except.h" 39#include "function.h" 40#include "insn-config.h" 41#include "expr.h" 42#include "libfuncs.h" 43#include "recog.h" 44#include "machmode.h" 45#include "toplev.h" 46#include "output.h" 47#include "ggc.h" 48#include "langhooks.h" 49#include "predict.h" 50#include "optabs.h" 51#include "target.h" 52#include "regs.h" 53 54/* Functions and data structures for expanding case statements. */ 55 56/* Case label structure, used to hold info on labels within case 57 statements. 
We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */

struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree;
                               also node chain during list phase */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.
   129 entries cover indices -1 .. 127 after the +1 bias applied by the
   COST_TABLE macro below.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 bias maps index -1 onto slot 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

/* Forward declarations for the file-local helpers defined below.  */
static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree);


/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  /* Lazily create the CODE_LABEL the first time it is asked for and
     cache it in DECL_RTL.  */
  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      /* Labels whose address is taken, or that are targets of nonlocal
         gotos, must not be deleted by the optimizers.  */
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.
*/
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  gcc_assert (function);

  /* The label may belong to a containing function (nested functions);
     record it on the forced-labels list of the function that actually
     owns it, not necessarily the one being expanded.  */
  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
                                                p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* A barrier marks the following code as unreachable from here.  */
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  /* The expanded value may not be in the mode of a code address;
     convert it to Pmode before jumping through it.  */
  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  /* A nonlocal-goto target needs receiver code and must be recorded so
     the nonlocal goto machinery can find it.  */
  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.
*/

static void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode,
                            ggc_strdup (TREE_STRING_POINTER (string)));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
                         int ninputs, int noutputs, bool *allows_mem,
                         bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning (0, "output constraint %qc for operand %d "
                 "is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, skipping the leading `='.
     CONSTRAINT_LEN handles multi-character target constraints.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned "
               "%<+%> or %<=%>");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        /* Modifiers and constant constraints say nothing about
           register/memory choice for an output.  */
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
                        int ninputs, int noutputs, int ninout,
                        const char * const * constraints,
                        bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        /* These are only errors in the original constraint; they are
           expected (and skipped) once we have switched to a matched
           output constraint below.  */
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains %qc", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          saw_match = true;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              /* ??? At the end of the loop, we will skip the first part of
                 the matched constraint.  This assumes not only that the
                 other constraint is an output constraint, but also that
                 the '=' or '+' come first.  */
              break;
            }
          else
            j = end - constraint;
          /* Anticipate increment at end of loop.  */
          j--;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation %qc in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
            != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}

/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.
*/ 563 564static tree 565decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED, 566 void *data) 567{ 568 tree decl = *declp; 569 const HARD_REG_SET *regs = data; 570 571 if (TREE_CODE (decl) == VAR_DECL) 572 { 573 if (DECL_HARD_REGISTER (decl) 574 && REG_P (DECL_RTL (decl)) 575 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER) 576 { 577 rtx reg = DECL_RTL (decl); 578 unsigned int regno; 579 580 for (regno = REGNO (reg); 581 regno < (REGNO (reg) 582 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]); 583 regno++) 584 if (TEST_HARD_REG_BIT (*regs, regno)) 585 return decl; 586 } 587 walk_subtrees = 0; 588 } 589 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL) 590 walk_subtrees = 0; 591 return NULL_TREE; 592} 593 594/* If there is an overlap between *REGS and DECL, return the first overlap 595 found. */ 596tree 597tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs) 598{ 599 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL); 600} 601 602/* Check for overlap between registers marked in CLOBBERED_REGS and 603 anything inappropriate in T. Emit error and return the register 604 variable definition for error, NULL_TREE for ok. */ 605 606static bool 607tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs) 608{ 609 /* Conflicts between asm-declared register variables and the clobber 610 list are not allowed. */ 611 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs); 612 613 if (overlap) 614 { 615 error ("asm-specifier for variable %qs conflicts with asm clobber list", 616 IDENTIFIER_POINTER (DECL_NAME (overlap))); 617 618 /* Reset registerness to stop multiple errors emitted for a single 619 variable. */ 620 DECL_REGISTER (overlap) = 0; 621 return true; 622 } 623 624 return false; 625} 626 627/* Generate RTL for an asm statement with arguments. 628 STRING is the instruction template. 629 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs. 
Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
                     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints: outputs first, then inputs.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;

      if (TREE_VALUE (tail) == error_mark_node)
        return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      /* decode_reg_name returns >= 0 for a register, or a negative code
         for the special names (-2 unknown, -3 "cc", -4 "memory").  */
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
          /* Clobbering the PIC register is an error.  */
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
            {
              error ("PIC register %qs clobbered in %<asm%>", regname);
              return;
            }

          SET_HARD_REG_BIT (clobbered_regs, i);
        }
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
        return;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        lang_hooks.mark_addressable (val);

      if (is_inout)
        ninout++;
    }

  /* In-out operands count once more as inputs.  */
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
         would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      /* Re-parse; already validated in the first pass, so it must
         succeed here.  */
      ok = parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
          if (MEM_P (op))
            op = validize_mem (op);

          if (! allows_reg && !MEM_P (op))
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && MEM_P (op))
              || GET_CODE (op) == CONCAT)
            {
              /* The lvalue is unusable as the direct asm output; route the
                 result through a fresh pseudo and copy back afterwards.  */
              real_output_rtx[i] = op;
              op = gen_reg_rtx (GET_MODE (op));
              if (is_inout)
                emit_move_insn (op, real_output_rtx[i]);
            }
        }
      else
        {
          op = assign_temp (type, 0, 0, 1);
          op = validize_mem (op);
          /* Hand the temporary back to the caller via OUTPUTS so it can
             copy the value to the real lvalue.  */
          TREE_VALUE (tail) = make_tree (type, op);
        }
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (type);
          inout_opnum[ninout++] = i;
        }

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               ggc_strdup (TREE_STRING_POINTER (string)),
                               empty_string, 0, argvec, constraintvec,
                               locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        (allows_mem && !allows_reg
                         ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg && TYPE_MODE (type) != BLKmode)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning (0, "asm operand %d probably doesn%'t match constraints",
                     i + noutputs);
          else if (MEM_P (op))
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as available a memory_operand
                 at this point.  Ignore it: clearly this *is* a memory.  */
            }
          else
            {
              warning (0, "use of memory input without lvalue in "
                       "asm operand %d is deprecated", i + noutputs);

              if (CONSTANT_P (op))
                {
                  rtx mem = force_const_mem (TYPE_MODE (type), op);
                  if (mem)
                    op = validize_mem (mem);
                  else
                    op = force_reg (TYPE_MODE (type), op);
                }
              if (REG_P (op)
                  || GET_CODE (op) == SUBREG
                  || GET_CODE (op) == CONCAT)
                {
                  /* Spill the value to a const-qualified stack temporary
                     so a memory-only constraint can be satisfied.  */
                  tree qual_type = build_qualified_type (type,
                                                         (TYPE_QUALS (type)
                                                          | TYPE_QUAL_CONST));
                  rtx memloc = assign_temp (qual_type, 1, 1, 1);
                  memloc = validize_mem (memloc);
                  emit_move_insn (memloc, op);
                  op = memloc;
                }
            }
        }

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
                             ggc_strdup (constraints[i + noutputs]));

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx.  The extra input
     slots are the last NINOUT entries of ARGVEC.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
        = output_rtx[j];

      /* The matching constraint for the implicit input is the output's
         operand number.  */
      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
                   ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS
                           (GET_MODE (output_rtx[i]),
                            ggc_strdup (TREE_STRING_POINTER (string)),
                            ggc_strdup (constraints[i]),
                            i, argvec, constraintvec, locus));

          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);
          rtx clobbered_reg;

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx_CLOBBER (VOIDmode,
                                       gen_rtx_MEM
                                       (BLKmode,
                                        gen_rtx_SCRATCH (VOIDmode)));
                  continue;
                }

              /* Ignore unknown register, error already signaled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          clobbered_reg = gen_rtx_REG (QImode, j);

          /* Do sanity check for overlap between clobbers and respectively
             input and outputs that hasn't been handled.  Such overlap
             should have been detected and reported above.  */
          if (!clobber_conflict_found)
            {
              int opno;

              /* We test the old body (obody) contents to avoid tripping
                 over the under-construction body.  */
              for (opno = 0; opno < noutputs; opno++)
                if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
                  internal_error ("asm clobber conflict with output operand");

              for (opno = 0; opno < ninputs - ninout; opno++)
                if (reg_overlap_mentioned_p (clobbered_reg,
                                             ASM_OPERANDS_INPUT (obody, opno)))
                  internal_error ("asm clobber conflict with input operand");
            }

          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
        }

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}

/* Expand an ASM_EXPR tree node: dispatch a simple asm directly, or set
   up output bookkeeping around expand_asm_operands for extended asm.  */
void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
                       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
                       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
        {
          expand_assignment (o[i], TREE_VALUE (tail));
          free_temp_slots ();

          /* Restore the original value so that it's correct the next
             time we expand this function.  */
          TREE_VALUE (tail) = o[i];
        }
    }
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      /* The number of alternatives is one more than the number of
         commas in any one constraint string.  */
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      /* Walk the outputs, then continue with the inputs.  */
      tmp = outputs;
      while (tmp)
        {
          const char *constraint
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for %<asm%> differ "
                     "in number of alternatives");
              return false;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  return true;
}

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.
*/

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  /* Compare each output name against every later output name.
     Names are IDENTIFIER-backed strings; simple_cst_equal suffices.  */
  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  /* Compare each input name against every later input name, and
     against all output names.  */
  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name %qs",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  Returns STRING itself
   when no substitution is needed, otherwise a freshly built STRING_CST.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicates up front; resolution below picks the first match
     regardless.  */
  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a mutable copy; resolve_operand_name_1 edits in place.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  A named
     reference looks like "%[name]" or "%<modifier-letter>[name]".  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}

/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  Outputs are numbered first, then
     inputs, matching the numbering used in constraint strings.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* Not found: diagnose (NUL-terminate at ']' so the message shows just
     the name), then substitute operand 0 to keep the string well-formed.  */
  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}

/* Generate RTL to evaluate the expression EXP, discarding the result.  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_expr (TYPE_SIZE (type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  /* Already-used or explicitly-silenced expressions never warn.  */
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    /* These codes have side effects (or are side effects), so their
       "value" being unused is fine.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

     warn:
      warning (0, "%Hvalue computed is not used", &locus);
      return 1;
    }
}


/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.
*/

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Lazily create the shared naked-return label, then jump to it.  */
  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      /* If the target promotes function return values, widen VAL to the
	 promoted mode before storing it in the return location.  */
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      /* A PARALLEL return location means the value is split across
	 several registers; use a group load for that case.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* `return <result> = <expr>': the value to compute is the RHS.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to a plain null return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the per-word pseudos into a single (word-mode-or-wider)
	 register, then narrow it to the result's mode if necessary.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      expand_value_return (result_rtl);
    }
}

/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
   in question represents the outermost pair of curly braces (i.e. the "body
   block") of a function or method.

   For any BLOCK node representing a "body block" of a function or method, the
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
   represents the outermost (function) scope for the function or method (i.e.
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
   *that* node in turn will point to the relevant FUNCTION_DECL node.
*/

int
is_body_block (tree stmt)
{
  /* Some front ends do not build body blocks at all; nothing to detect.  */
  if (lang_hooks.no_body_blocks)
    return 0;

  if (TREE_CODE (stmt) == BLOCK)
    {
      tree parent = BLOCK_SUPERCONTEXT (stmt);

      if (parent && TREE_CODE (parent) == BLOCK)
	{
	  tree grandparent = BLOCK_SUPERCONTEXT (parent);

	  /* BLOCK -> BLOCK -> FUNCTION_DECL is exactly the body-block
	     shape described above.  */
	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
	    return 1;
	}
    }

  return 0;
}

/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}

/* Emit code to save the current value of stack.  Returns a pseudo
   holding the saved stack pointer.  */
rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}

/* Emit code to restore the current value of stack.  VAR's DECL_RTL holds
   the value previously produced by expand_stack_save.  */
void
expand_stack_restore (tree var)
{
  rtx sa = DECL_RTL (var);

  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
}

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, mark the whole union used.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
	/* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	   instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}

/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
	  && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}

/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.
HI and LO hold 2153 the integer to be tested against, LABEL contains the label 2154 to jump to upon success and BITS counts the number of case 2155 nodes handled by this test, typically the number of bits 2156 set in HI:LO. */ 2157 2158struct case_bit_test 2159{ 2160 HOST_WIDE_INT hi; 2161 HOST_WIDE_INT lo; 2162 rtx label; 2163 int bits; 2164}; 2165 2166/* Determine whether "1 << x" is relatively cheap in word_mode. */ 2167 2168static 2169bool lshift_cheap_p (void) 2170{ 2171 static bool init = false; 2172 static bool cheap = true; 2173 2174 if (!init) 2175 { 2176 rtx reg = gen_rtx_REG (word_mode, 10000); 2177 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET); 2178 cheap = cost < COSTS_N_INSNS (3); 2179 init = true; 2180 } 2181 2182 return cheap; 2183} 2184 2185/* Comparison function for qsort to order bit tests by decreasing 2186 number of case nodes, i.e. the node with the most cases gets 2187 tested first. */ 2188 2189static int 2190case_bit_test_cmp (const void *p1, const void *p2) 2191{ 2192 const struct case_bit_test *d1 = p1; 2193 const struct case_bit_test *d2 = p2; 2194 2195 return d2->bits - d1->bits; 2196} 2197 2198/* Expand a switch statement by a short sequence of bit-wise 2199 comparisons. "switch(x)" is effectively converted into 2200 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are 2201 integer constants. 2202 2203 INDEX_EXPR is the value being switched on, which is of 2204 type INDEX_TYPE. MINVAL is the lowest case value of in 2205 the case nodes, of INDEX_TYPE type, and RANGE is highest 2206 value minus MINVAL, also of type INDEX_TYPE. NODES is 2207 the set of case nodes, and DEFAULT_LABEL is the label to 2208 branch to should none of the cases match. 2209 2210 There *MUST* be MAX_CASE_BIT_TESTS or less unique case 2211 node targets. 
*/ 2212 2213static void 2214emit_case_bit_tests (tree index_type, tree index_expr, tree minval, 2215 tree range, case_node_ptr nodes, rtx default_label) 2216{ 2217 struct case_bit_test test[MAX_CASE_BIT_TESTS]; 2218 enum machine_mode mode; 2219 rtx expr, index, label; 2220 unsigned int i,j,lo,hi; 2221 struct case_node *n; 2222 unsigned int count; 2223 2224 count = 0; 2225 for (n = nodes; n; n = n->right) 2226 { 2227 label = label_rtx (n->code_label); 2228 for (i = 0; i < count; i++) 2229 if (label == test[i].label) 2230 break; 2231 2232 if (i == count) 2233 { 2234 gcc_assert (count < MAX_CASE_BIT_TESTS); 2235 test[i].hi = 0; 2236 test[i].lo = 0; 2237 test[i].label = label; 2238 test[i].bits = 1; 2239 count++; 2240 } 2241 else 2242 test[i].bits++; 2243 2244 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, 2245 n->low, minval), 1); 2246 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, 2247 n->high, minval), 1); 2248 for (j = lo; j <= hi; j++) 2249 if (j >= HOST_BITS_PER_WIDE_INT) 2250 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT); 2251 else 2252 test[i].lo |= (HOST_WIDE_INT) 1 << j; 2253 } 2254 2255 qsort (test, count, sizeof(*test), case_bit_test_cmp); 2256 2257 index_expr = fold_build2 (MINUS_EXPR, index_type, 2258 fold_convert (index_type, index_expr), 2259 fold_convert (index_type, minval)); 2260 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); 2261 do_pending_stack_adjust (); 2262 2263 mode = TYPE_MODE (index_type); 2264 expr = expand_expr (range, NULL_RTX, VOIDmode, 0); 2265 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1, 2266 default_label); 2267 2268 index = convert_to_mode (word_mode, index, 0); 2269 index = expand_binop (word_mode, ashl_optab, const1_rtx, 2270 index, NULL_RTX, 1, OPTAB_WIDEN); 2271 2272 for (i = 0; i < count; i++) 2273 { 2274 expr = immed_double_const (test[i].lo, test[i].hi, word_mode); 2275 expr = expand_binop (word_mode, and_optab, index, expr, 2276 NULL_RTX, 1, OPTAB_WIDEN); 2277 
emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX, 2278 word_mode, 1, test[i].label); 2279 } 2280 2281 emit_jump (default_label); 2282} 2283 2284#ifndef HAVE_casesi 2285#define HAVE_casesi 0 2286#endif 2287 2288#ifndef HAVE_tablejump 2289#define HAVE_tablejump 0 2290#endif 2291 2292/* Terminate a case (Pascal/Ada) or switch (C) statement 2293 in which ORIG_INDEX is the expression to be tested. 2294 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX 2295 type as given in the source before any compiler conversions. 2296 Generate the code to test it and jump to the right place. */ 2297 2298void 2299expand_case (tree exp) 2300{ 2301 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE; 2302 rtx default_label = 0; 2303 struct case_node *n; 2304 unsigned int count, uniq; 2305 rtx index; 2306 rtx table_label; 2307 int ncases; 2308 rtx *labelvec; 2309 int i, fail; 2310 rtx before_case, end, lab; 2311 2312 tree vec = SWITCH_LABELS (exp); 2313 tree orig_type = TREE_TYPE (exp); 2314 tree index_expr = SWITCH_COND (exp); 2315 tree index_type = TREE_TYPE (index_expr); 2316 int unsignedp = TYPE_UNSIGNED (index_type); 2317 2318 /* The insn after which the case dispatch should finally 2319 be emitted. Zero for a dummy. */ 2320 rtx start; 2321 2322 /* A list of case labels; it is first built as a list and it may then 2323 be rearranged into a nearly balanced binary tree. */ 2324 struct case_node *case_list = 0; 2325 2326 /* Label to jump to if no case matches. */ 2327 tree default_label_decl; 2328 2329 /* The switch body is lowered in gimplify.c, we should never have 2330 switches with a non-NULL SWITCH_BODY here. */ 2331 gcc_assert (!SWITCH_BODY (exp)); 2332 gcc_assert (SWITCH_LABELS (exp)); 2333 2334 do_pending_stack_adjust (); 2335 2336 /* An ERROR_MARK occurs for various reasons including invalid data type. 
*/ 2337 if (index_type != error_mark_node) 2338 { 2339 tree elt; 2340 bitmap label_bitmap; 2341 2342 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index 2343 expressions being INTEGER_CST. */ 2344 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST); 2345 2346 /* The default case is at the end of TREE_VEC. */ 2347 elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1); 2348 gcc_assert (!CASE_HIGH (elt)); 2349 gcc_assert (!CASE_LOW (elt)); 2350 default_label_decl = CASE_LABEL (elt); 2351 2352 for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; ) 2353 { 2354 tree low, high; 2355 elt = TREE_VEC_ELT (vec, i); 2356 2357 low = CASE_LOW (elt); 2358 gcc_assert (low); 2359 high = CASE_HIGH (elt); 2360 2361 /* Discard empty ranges. */ 2362 if (high && INT_CST_LT (high, low)) 2363 continue; 2364 2365 case_list = add_case_node (case_list, index_type, low, high, 2366 CASE_LABEL (elt)); 2367 } 2368 2369 2370 /* Make sure start points to something that won't need any 2371 transformation before the end of this function. */ 2372 start = get_last_insn (); 2373 if (! NOTE_P (start)) 2374 { 2375 emit_note (NOTE_INSN_DELETED); 2376 start = get_last_insn (); 2377 } 2378 2379 default_label = label_rtx (default_label_decl); 2380 2381 before_case = get_last_insn (); 2382 2383 /* Get upper and lower bounds of case values. */ 2384 2385 uniq = 0; 2386 count = 0; 2387 label_bitmap = BITMAP_ALLOC (NULL); 2388 for (n = case_list; n; n = n->right) 2389 { 2390 /* Count the elements and track the largest and smallest 2391 of them (treating them as signed even if they are not). */ 2392 if (count++ == 0) 2393 { 2394 minval = n->low; 2395 maxval = n->high; 2396 } 2397 else 2398 { 2399 if (INT_CST_LT (n->low, minval)) 2400 minval = n->low; 2401 if (INT_CST_LT (maxval, n->high)) 2402 maxval = n->high; 2403 } 2404 /* A range counts double, since it requires two compares. */ 2405 if (! 
tree_int_cst_equal (n->low, n->high)) 2406 count++; 2407 2408 /* If we have not seen this label yet, then increase the 2409 number of unique case node targets seen. */ 2410 lab = label_rtx (n->code_label); 2411 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab))) 2412 { 2413 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)); 2414 uniq++; 2415 } 2416 } 2417 2418 BITMAP_FREE (label_bitmap); 2419 2420 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single 2421 destination, such as one with a default case only. However, 2422 it doesn't remove cases that are out of range for the switch 2423 type, so we may still get a zero here. */ 2424 if (count == 0) 2425 { 2426 emit_jump (default_label); 2427 return; 2428 } 2429 2430 /* Compute span of values. */ 2431 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval); 2432 2433 /* Try implementing this switch statement by a short sequence of 2434 bit-wise comparisons. However, we let the binary-tree case 2435 below handle constant index expressions. */ 2436 if (CASE_USE_BIT_TESTS 2437 && ! TREE_CONSTANT (index_expr) 2438 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0 2439 && compare_tree_int (range, 0) > 0 2440 && lshift_cheap_p () 2441 && ((uniq == 1 && count >= 3) 2442 || (uniq == 2 && count >= 5) 2443 || (uniq == 3 && count >= 6))) 2444 { 2445 /* Optimize the case where all the case values fit in a 2446 word without having to subtract MINVAL. In this case, 2447 we can optimize away the subtraction. */ 2448 if (compare_tree_int (minval, 0) > 0 2449 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0) 2450 { 2451 minval = build_int_cst (index_type, 0); 2452 range = maxval; 2453 } 2454 emit_case_bit_tests (index_type, index_expr, minval, range, 2455 case_list, default_label); 2456 } 2457 2458 /* If range of values is much bigger than number of values, 2459 make a sequence of conditional branches instead of a dispatch. 
2460 If the switch-index is a constant, do it this way 2461 because we can optimize it. */ 2462 2463 else if (count < case_values_threshold () 2464 || compare_tree_int (range, 2465 (optimize_size ? 3 : 10) * count) > 0 2466 /* RANGE may be signed, and really large ranges will show up 2467 as negative numbers. */ 2468 || compare_tree_int (range, 0) < 0 2469#ifndef ASM_OUTPUT_ADDR_DIFF_ELT 2470 || flag_pic 2471#endif 2472 || !flag_jump_tables 2473 || TREE_CONSTANT (index_expr) 2474 /* If neither casesi or tablejump is available, we can 2475 only go this way. */ 2476 || (!HAVE_casesi && !HAVE_tablejump)) 2477 { 2478 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); 2479 2480 /* If the index is a short or char that we do not have 2481 an insn to handle comparisons directly, convert it to 2482 a full integer now, rather than letting each comparison 2483 generate the conversion. */ 2484 2485 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT 2486 && ! have_insn_for (COMPARE, GET_MODE (index))) 2487 { 2488 enum machine_mode wider_mode; 2489 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode; 2490 wider_mode = GET_MODE_WIDER_MODE (wider_mode)) 2491 if (have_insn_for (COMPARE, wider_mode)) 2492 { 2493 index = convert_to_mode (wider_mode, index, unsignedp); 2494 break; 2495 } 2496 } 2497 2498 do_pending_stack_adjust (); 2499 2500 if (MEM_P (index)) 2501 index = copy_to_reg (index); 2502 2503 /* We generate a binary decision tree to select the 2504 appropriate target code. This is done as follows: 2505 2506 The list of cases is rearranged into a binary tree, 2507 nearly optimal assuming equal probability for each case. 2508 2509 The tree is transformed into RTL, eliminating 2510 redundant test conditions at the same time. 2511 2512 If program flow could reach the end of the 2513 decision tree an unconditional jump to the 2514 default code is emitted. 
*/ 2515 2516 use_cost_table 2517 = (TREE_CODE (orig_type) != ENUMERAL_TYPE 2518 && estimate_case_costs (case_list)); 2519 balance_case_nodes (&case_list, NULL); 2520 emit_case_nodes (index, case_list, default_label, index_type); 2521 emit_jump (default_label); 2522 } 2523 else 2524 { 2525 table_label = gen_label_rtx (); 2526 if (! try_casesi (index_type, index_expr, minval, range, 2527 table_label, default_label)) 2528 { 2529 bool ok; 2530 2531 /* Index jumptables from zero for suitable values of 2532 minval to avoid a subtraction. */ 2533 if (! optimize_size 2534 && compare_tree_int (minval, 0) > 0 2535 && compare_tree_int (minval, 3) < 0) 2536 { 2537 minval = build_int_cst (index_type, 0); 2538 range = maxval; 2539 } 2540 2541 ok = try_tablejump (index_type, index_expr, minval, range, 2542 table_label, default_label); 2543 gcc_assert (ok); 2544 } 2545 2546 /* Get table of labels to jump to, in order of case index. */ 2547 2548 ncases = tree_low_cst (range, 0) + 1; 2549 labelvec = alloca (ncases * sizeof (rtx)); 2550 memset (labelvec, 0, ncases * sizeof (rtx)); 2551 2552 for (n = case_list; n; n = n->right) 2553 { 2554 /* Compute the low and high bounds relative to the minimum 2555 value since that should fit in a HOST_WIDE_INT while the 2556 actual values may not. */ 2557 HOST_WIDE_INT i_low 2558 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, 2559 n->low, minval), 1); 2560 HOST_WIDE_INT i_high 2561 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, 2562 n->high, minval), 1); 2563 HOST_WIDE_INT i; 2564 2565 for (i = i_low; i <= i_high; i ++) 2566 labelvec[i] 2567 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label)); 2568 } 2569 2570 /* Fill in the gaps with the default. */ 2571 for (i = 0; i < ncases; i++) 2572 if (labelvec[i] == 0) 2573 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label); 2574 2575 /* Output the table. 
*/ 2576 emit_label (table_label); 2577 2578 if (CASE_VECTOR_PC_RELATIVE || flag_pic) 2579 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE, 2580 gen_rtx_LABEL_REF (Pmode, table_label), 2581 gen_rtvec_v (ncases, labelvec), 2582 const0_rtx, const0_rtx)); 2583 else 2584 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE, 2585 gen_rtvec_v (ncases, labelvec))); 2586 2587 /* Record no drop-through after the table. */ 2588 emit_barrier (); 2589 } 2590 2591 before_case = NEXT_INSN (before_case); 2592 end = get_last_insn (); 2593 fail = squeeze_notes (&before_case, &end); 2594 gcc_assert (!fail); 2595 reorder_insns (before_case, end, start); 2596 } 2597 2598 free_temp_slots (); 2599} 2600 2601/* Generate code to jump to LABEL if OP1 and OP2 are equal. */ 2602 2603static void 2604do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp) 2605{ 2606 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT) 2607 { 2608 if (op1 == op2) 2609 emit_jump (label); 2610 } 2611 else 2612 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, 2613 (GET_MODE (op1) == VOIDmode 2614 ? GET_MODE (op2) : GET_MODE (op1)), 2615 unsignedp, label); 2616} 2617 2618/* Not all case values are encountered equally. This function 2619 uses a heuristic to weight case labels, in cases where that 2620 looks like a reasonable thing to do. 2621 2622 Right now, all we try to guess is text, and we establish the 2623 following weights: 2624 2625 chars above space: 16 2626 digits: 16 2627 default: 12 2628 space, punct: 8 2629 tab: 4 2630 newline: 2 2631 other "\" chars: 1 2632 remaining chars: 0 2633 2634 If we find any cases in the switch that are not either -1 or in the range 2635 of valid ASCII characters, or are control characters other than those 2636 commonly used with "\", don't treat this switch scanning text. 2637 2638 Return 1 if these nodes are suitable for cost estimation, otherwise 2639 return 0. 
*/ 2640 2641static int 2642estimate_case_costs (case_node_ptr node) 2643{ 2644 tree min_ascii = integer_minus_one_node; 2645 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127); 2646 case_node_ptr n; 2647 int i; 2648 2649 /* If we haven't already made the cost table, make it now. Note that the 2650 lower bound of the table is -1, not zero. */ 2651 2652 if (! cost_table_initialized) 2653 { 2654 cost_table_initialized = 1; 2655 2656 for (i = 0; i < 128; i++) 2657 { 2658 if (ISALNUM (i)) 2659 COST_TABLE (i) = 16; 2660 else if (ISPUNCT (i)) 2661 COST_TABLE (i) = 8; 2662 else if (ISCNTRL (i)) 2663 COST_TABLE (i) = -1; 2664 } 2665 2666 COST_TABLE (' ') = 8; 2667 COST_TABLE ('\t') = 4; 2668 COST_TABLE ('\0') = 4; 2669 COST_TABLE ('\n') = 2; 2670 COST_TABLE ('\f') = 1; 2671 COST_TABLE ('\v') = 1; 2672 COST_TABLE ('\b') = 1; 2673 } 2674 2675 /* See if all the case expressions look like text. It is text if the 2676 constant is >= -1 and the highest constant is <= 127. Do all comparisons 2677 as signed arithmetic since we don't want to ever access cost_table with a 2678 value less than -1. Also check that none of the constants in a range 2679 are strange control characters. */ 2680 2681 for (n = node; n; n = n->right) 2682 { 2683 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high)) 2684 return 0; 2685 2686 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low); 2687 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++) 2688 if (COST_TABLE (i) < 0) 2689 return 0; 2690 } 2691 2692 /* All interesting values are within the range of interesting 2693 ASCII characters. */ 2694 return 1; 2695} 2696 2697/* Take an ordered list of case nodes 2698 and transform them into a near optimal binary tree, 2699 on the assumption that any target code selection value is as 2700 likely as any other. 2701 2702 The transformation is performed by splitting the ordered 2703 list into two equal sections plus a pivot. 
   The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot: detach everything from it onward, make it
	     the new head, and hang the skipped prefix off its left.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low,
			       build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high,
			       build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.
2939 2940 We can assume that when control reaches the code generated here, 2941 the index value has already been compared with the parents 2942 of this node, and determined to be on the same side of each parent 2943 as this node is. Thus, if this node tests for the value 51, 2944 and a parent tested for 52, we don't need to consider 2945 the possibility of a value greater than 51. If another parent 2946 tests for the value 50, then this node need not test anything. */ 2947 2948static void 2949emit_case_nodes (rtx index, case_node_ptr node, rtx default_label, 2950 tree index_type) 2951{ 2952 /* If INDEX has an unsigned type, we must make unsigned branches. */ 2953 int unsignedp = TYPE_UNSIGNED (index_type); 2954 enum machine_mode mode = GET_MODE (index); 2955 enum machine_mode imode = TYPE_MODE (index_type); 2956 2957 /* Handle indices detected as constant during RTL expansion. */ 2958 if (mode == VOIDmode) 2959 mode = imode; 2960 2961 /* See if our parents have already tested everything for us. 2962 If they have, emit an unconditional jump for this node. */ 2963 if (node_is_bounded (node, index_type)) 2964 emit_jump (label_rtx (node->code_label)); 2965 2966 else if (tree_int_cst_equal (node->low, node->high)) 2967 { 2968 /* Node is single valued. First see if the index expression matches 2969 this node and then check our children, if any. */ 2970 2971 do_jump_if_equal (index, 2972 convert_modes (mode, imode, 2973 expand_expr (node->low, NULL_RTX, 2974 VOIDmode, 0), 2975 unsignedp), 2976 label_rtx (node->code_label), unsignedp); 2977 2978 if (node->right != 0 && node->left != 0) 2979 { 2980 /* This node has children on both sides. 2981 Dispatch to one side or the other 2982 by comparing the index value with this node's value. 2983 If one subtree is bounded, check that one first, 2984 so we can avoid real branches in the tree. 
*/ 2985 2986 if (node_is_bounded (node->right, index_type)) 2987 { 2988 emit_cmp_and_jump_insns (index, 2989 convert_modes 2990 (mode, imode, 2991 expand_expr (node->high, NULL_RTX, 2992 VOIDmode, 0), 2993 unsignedp), 2994 GT, NULL_RTX, mode, unsignedp, 2995 label_rtx (node->right->code_label)); 2996 emit_case_nodes (index, node->left, default_label, index_type); 2997 } 2998 2999 else if (node_is_bounded (node->left, index_type)) 3000 { 3001 emit_cmp_and_jump_insns (index, 3002 convert_modes 3003 (mode, imode, 3004 expand_expr (node->high, NULL_RTX, 3005 VOIDmode, 0), 3006 unsignedp), 3007 LT, NULL_RTX, mode, unsignedp, 3008 label_rtx (node->left->code_label)); 3009 emit_case_nodes (index, node->right, default_label, index_type); 3010 } 3011 3012 /* If both children are single-valued cases with no 3013 children, finish up all the work. This way, we can save 3014 one ordered comparison. */ 3015 else if (tree_int_cst_equal (node->right->low, node->right->high) 3016 && node->right->left == 0 3017 && node->right->right == 0 3018 && tree_int_cst_equal (node->left->low, node->left->high) 3019 && node->left->left == 0 3020 && node->left->right == 0) 3021 { 3022 /* Neither node is bounded. First distinguish the two sides; 3023 then emit the code for one side at a time. */ 3024 3025 /* See if the value matches what the right hand side 3026 wants. */ 3027 do_jump_if_equal (index, 3028 convert_modes (mode, imode, 3029 expand_expr (node->right->low, 3030 NULL_RTX, 3031 VOIDmode, 0), 3032 unsignedp), 3033 label_rtx (node->right->code_label), 3034 unsignedp); 3035 3036 /* See if the value matches what the left hand side 3037 wants. */ 3038 do_jump_if_equal (index, 3039 convert_modes (mode, imode, 3040 expand_expr (node->left->low, 3041 NULL_RTX, 3042 VOIDmode, 0), 3043 unsignedp), 3044 label_rtx (node->left->code_label), 3045 unsignedp); 3046 } 3047 3048 else 3049 { 3050 /* Neither node is bounded. First distinguish the two sides; 3051 then emit the code for one side at a time. 
*/ 3052 3053 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); 3054 3055 /* See if the value is on the right. */ 3056 emit_cmp_and_jump_insns (index, 3057 convert_modes 3058 (mode, imode, 3059 expand_expr (node->high, NULL_RTX, 3060 VOIDmode, 0), 3061 unsignedp), 3062 GT, NULL_RTX, mode, unsignedp, 3063 label_rtx (test_label)); 3064 3065 /* Value must be on the left. 3066 Handle the left-hand subtree. */ 3067 emit_case_nodes (index, node->left, default_label, index_type); 3068 /* If left-hand subtree does nothing, 3069 go to default. */ 3070 emit_jump (default_label); 3071 3072 /* Code branches here for the right-hand subtree. */ 3073 expand_label (test_label); 3074 emit_case_nodes (index, node->right, default_label, index_type); 3075 } 3076 } 3077 3078 else if (node->right != 0 && node->left == 0) 3079 { 3080 /* Here we have a right child but no left so we issue a conditional 3081 branch to default and process the right child. 3082 3083 Omit the conditional branch to default if the right child 3084 does not have any children and is single valued; it would 3085 cost too much space to save so little time. */ 3086 3087 if (node->right->right || node->right->left 3088 || !tree_int_cst_equal (node->right->low, node->right->high)) 3089 { 3090 if (!node_has_low_bound (node, index_type)) 3091 { 3092 emit_cmp_and_jump_insns (index, 3093 convert_modes 3094 (mode, imode, 3095 expand_expr (node->high, NULL_RTX, 3096 VOIDmode, 0), 3097 unsignedp), 3098 LT, NULL_RTX, mode, unsignedp, 3099 default_label); 3100 } 3101 3102 emit_case_nodes (index, node->right, default_label, index_type); 3103 } 3104 else 3105 /* We cannot process node->right normally 3106 since we haven't ruled out the numbers less than 3107 this node's value. So handle node->right explicitly. 
*/ 3108 do_jump_if_equal (index, 3109 convert_modes 3110 (mode, imode, 3111 expand_expr (node->right->low, NULL_RTX, 3112 VOIDmode, 0), 3113 unsignedp), 3114 label_rtx (node->right->code_label), unsignedp); 3115 } 3116 3117 else if (node->right == 0 && node->left != 0) 3118 { 3119 /* Just one subtree, on the left. */ 3120 if (node->left->left || node->left->right 3121 || !tree_int_cst_equal (node->left->low, node->left->high)) 3122 { 3123 if (!node_has_high_bound (node, index_type)) 3124 { 3125 emit_cmp_and_jump_insns (index, 3126 convert_modes 3127 (mode, imode, 3128 expand_expr (node->high, NULL_RTX, 3129 VOIDmode, 0), 3130 unsignedp), 3131 GT, NULL_RTX, mode, unsignedp, 3132 default_label); 3133 } 3134 3135 emit_case_nodes (index, node->left, default_label, index_type); 3136 } 3137 else 3138 /* We cannot process node->left normally 3139 since we haven't ruled out the numbers less than 3140 this node's value. So handle node->left explicitly. */ 3141 do_jump_if_equal (index, 3142 convert_modes 3143 (mode, imode, 3144 expand_expr (node->left->low, NULL_RTX, 3145 VOIDmode, 0), 3146 unsignedp), 3147 label_rtx (node->left->code_label), unsignedp); 3148 } 3149 } 3150 else 3151 { 3152 /* Node is a range. These cases are very similar to those for a single 3153 value, except that we do not start by testing whether this node 3154 is the one to branch to. */ 3155 3156 if (node->right != 0 && node->left != 0) 3157 { 3158 /* Node has subtrees on both sides. 3159 If the right-hand subtree is bounded, 3160 test for it first, since we can go straight there. 3161 Otherwise, we need to make a branch in the control structure, 3162 then handle the two subtrees. */ 3163 tree test_label = 0; 3164 3165 if (node_is_bounded (node->right, index_type)) 3166 /* Right hand node is fully bounded so we can eliminate any 3167 testing and branch directly to the target code. 
*/ 3168 emit_cmp_and_jump_insns (index, 3169 convert_modes 3170 (mode, imode, 3171 expand_expr (node->high, NULL_RTX, 3172 VOIDmode, 0), 3173 unsignedp), 3174 GT, NULL_RTX, mode, unsignedp, 3175 label_rtx (node->right->code_label)); 3176 else 3177 { 3178 /* Right hand node requires testing. 3179 Branch to a label where we will handle it later. */ 3180 3181 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); 3182 emit_cmp_and_jump_insns (index, 3183 convert_modes 3184 (mode, imode, 3185 expand_expr (node->high, NULL_RTX, 3186 VOIDmode, 0), 3187 unsignedp), 3188 GT, NULL_RTX, mode, unsignedp, 3189 label_rtx (test_label)); 3190 } 3191 3192 /* Value belongs to this node or to the left-hand subtree. */ 3193 3194 emit_cmp_and_jump_insns (index, 3195 convert_modes 3196 (mode, imode, 3197 expand_expr (node->low, NULL_RTX, 3198 VOIDmode, 0), 3199 unsignedp), 3200 GE, NULL_RTX, mode, unsignedp, 3201 label_rtx (node->code_label)); 3202 3203 /* Handle the left-hand subtree. */ 3204 emit_case_nodes (index, node->left, default_label, index_type); 3205 3206 /* If right node had to be handled later, do that now. */ 3207 3208 if (test_label) 3209 { 3210 /* If the left-hand subtree fell through, 3211 don't let it fall into the right-hand subtree. */ 3212 emit_jump (default_label); 3213 3214 expand_label (test_label); 3215 emit_case_nodes (index, node->right, default_label, index_type); 3216 } 3217 } 3218 3219 else if (node->right != 0 && node->left == 0) 3220 { 3221 /* Deal with values to the left of this node, 3222 if they are possible. */ 3223 if (!node_has_low_bound (node, index_type)) 3224 { 3225 emit_cmp_and_jump_insns (index, 3226 convert_modes 3227 (mode, imode, 3228 expand_expr (node->low, NULL_RTX, 3229 VOIDmode, 0), 3230 unsignedp), 3231 LT, NULL_RTX, mode, unsignedp, 3232 default_label); 3233 } 3234 3235 /* Value belongs to this node or to the right-hand subtree. 
*/ 3236 3237 emit_cmp_and_jump_insns (index, 3238 convert_modes 3239 (mode, imode, 3240 expand_expr (node->high, NULL_RTX, 3241 VOIDmode, 0), 3242 unsignedp), 3243 LE, NULL_RTX, mode, unsignedp, 3244 label_rtx (node->code_label)); 3245 3246 emit_case_nodes (index, node->right, default_label, index_type); 3247 } 3248 3249 else if (node->right == 0 && node->left != 0) 3250 { 3251 /* Deal with values to the right of this node, 3252 if they are possible. */ 3253 if (!node_has_high_bound (node, index_type)) 3254 { 3255 emit_cmp_and_jump_insns (index, 3256 convert_modes 3257 (mode, imode, 3258 expand_expr (node->high, NULL_RTX, 3259 VOIDmode, 0), 3260 unsignedp), 3261 GT, NULL_RTX, mode, unsignedp, 3262 default_label); 3263 } 3264 3265 /* Value belongs to this node or to the left-hand subtree. */ 3266 3267 emit_cmp_and_jump_insns (index, 3268 convert_modes 3269 (mode, imode, 3270 expand_expr (node->low, NULL_RTX, 3271 VOIDmode, 0), 3272 unsignedp), 3273 GE, NULL_RTX, mode, unsignedp, 3274 label_rtx (node->code_label)); 3275 3276 emit_case_nodes (index, node->left, default_label, index_type); 3277 } 3278 3279 else 3280 { 3281 /* Node has no children so we check low and high bounds to remove 3282 redundant tests. Only one of the bounds can exist, 3283 since otherwise this node is bounded--a case tested already. 
*/ 3284 int high_bound = node_has_high_bound (node, index_type); 3285 int low_bound = node_has_low_bound (node, index_type); 3286 3287 if (!high_bound && low_bound) 3288 { 3289 emit_cmp_and_jump_insns (index, 3290 convert_modes 3291 (mode, imode, 3292 expand_expr (node->high, NULL_RTX, 3293 VOIDmode, 0), 3294 unsignedp), 3295 GT, NULL_RTX, mode, unsignedp, 3296 default_label); 3297 } 3298 3299 else if (!low_bound && high_bound) 3300 { 3301 emit_cmp_and_jump_insns (index, 3302 convert_modes 3303 (mode, imode, 3304 expand_expr (node->low, NULL_RTX, 3305 VOIDmode, 0), 3306 unsignedp), 3307 LT, NULL_RTX, mode, unsignedp, 3308 default_label); 3309 } 3310 else if (!low_bound && !high_bound) 3311 { 3312 /* Widen LOW and HIGH to the same width as INDEX. */ 3313 tree type = lang_hooks.types.type_for_mode (mode, unsignedp); 3314 tree low = build1 (CONVERT_EXPR, type, node->low); 3315 tree high = build1 (CONVERT_EXPR, type, node->high); 3316 rtx low_rtx, new_index, new_bound; 3317 3318 /* Instead of doing two branches, emit one unsigned branch for 3319 (index-low) > (high-low). */ 3320 low_rtx = expand_expr (low, NULL_RTX, mode, 0); 3321 new_index = expand_simple_binop (mode, MINUS, index, low_rtx, 3322 NULL_RTX, unsignedp, 3323 OPTAB_WIDEN); 3324 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type, 3325 high, low), 3326 NULL_RTX, mode, 0); 3327 3328 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX, 3329 mode, 1, default_label); 3330 } 3331 3332 emit_jump (label_rtx (node->code_label)); 3333 } 3334 } 3335} 3336