/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in GCC; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest integer that is a multiple of the
   required alignment and not greater than VALUE.  Avoid using division
   in case the value is negative.  Assume the alignment is a power of
   two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than VALUE.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
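/* Illustrative worked example (not part of the original source): with
   ALIGN == 8, FLOOR_ROUND (-13, 8) masks -13 (...11110011 in binary)
   down to -16, while CEIL_ROUND (13, 8) computes (13 + 7) & ~7 == 16.
   The bit masking rounds correctly even for negative values, which a
   division-based rounding would not.  */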
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;


htab_t types_used_by_vars_hash = NULL;
tree types_used_by_cur_var_decl = NULL;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
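/* Illustrative sketch (not part of the original file): a front end
   compiling a nested function would bracket the nested compilation as

       push_function_context ();
       ... expand the nested function ...
       pop_function_context ();

   saving and restoring cfun and the related per-function state of the
   enclosing function around it.  */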
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
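/* Worked example (illustrative, not from the original source): with a
   32-bit Pmode and 4-byte words, the check above reports an error once
   local objects exceed 2^31 - 256 bytes, reserving 64 words for the
   fixed part of the frame so every slot stays representable as a
   signed pointer offset.  */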
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align,
		      bool reduce_alignment_ok ATTRIBUTE_UNUSED)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment, alignment_in_bits;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert (reduce_alignment_ok
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with the last parameter as false.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, false);
}
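/* Illustrative example (not in the original source): a caller needing
   a word-sized scratch slot could request

       rtx slot = assign_stack_local (word_mode,
				      GET_MODE_SIZE (word_mode), 0);

   where ALIGN == 0 asks for the natural alignment of word_mode.  The
   returned MEM is based on virtual_stack_vars_rtx until
   instantiate_virtual_regs has run.  */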

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = GGC_NEW (struct temp_slot);
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = GGC_NEW (struct temp_slot);

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate memory of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
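/* Illustrative example (not from the original source): expanding a
   call whose aggregate result must live in memory might request

       rtx mem = assign_temp (TREE_TYPE (exp), 0, 1, 0);

   i.e. KEEP == 0 and MEMORY_REQUIRED == 1, yielding an addressable
   stack slot that a later free_temp_slots () may reclaim.  */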
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
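/* Illustrative example (not part of the original file): two free
   BLKmode slots with base_offset 0 / full_size 16 and base_offset 16 /
   full_size 8 satisfy the adjacency test above, so they are merged
   into a single 24-byte slot that can serve a later, larger
   request.  */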
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	{
	  make_slot_available (p);
	  some_available = true;
	}
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
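/* Illustrative sketch (not part of the original source): expansion of
   one statement typically brackets its temporaries as

       push_temp_slots ();
       ... expand the statement, allocating temporaries ...
       preserve_temp_slots (result);
       pop_temp_slots ();

   so that only slots reachable from RESULT, or whose address was
   taken, outlive the statement.  */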
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif


/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
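/* For illustration (not in the original file): given var_offset ==
   STARTING_FRAME_OFFSET, instantiation rewrites

       (plus (reg virtual-stack-vars) (const_int 8))

   into

       (plus (reg frame-pointer) (const_int (8 + var_offset)))

   using the register/offset pair returned by instantiate_new_reg.  */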
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */
static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl, t, next;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      next = TREE_CHAIN (t);
      decl = TREE_VALUE (t);
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      ggc_free (t);
    }
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  for_each_rtx (&INSN_VAR_LOCATION (insn),
			instantiate_virtual_regs_in_rtx, NULL);
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = (fndecl
		  ? TREE_TYPE (fndecl)
		  : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other tree codes here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
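/* Illustrative example (not from the original source): on a typical
   target, aggregate_value_p is 0 for "int", which comes back in a
   single call-clobbered register, but 1 for a large struct, whose
   value must be returned via an invisible reference in memory.  */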
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}

/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store was specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard the register keyword for variables
	 with types containing methods, otherwise the methods won't be
	 callable from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;
    default:
      break;
    }

  return true;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
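/* Illustrative note: for a by-reference argument the copy can be made on
   either side of the call.  When targetm.calls.callee_copies returns true
   (as some ABIs specify), the caller passes the address of the original
   object and gimplify_parameters below emits the copy in the callee;
   otherwise the caller makes the copy and passes its address.  An
   addressable (non-trivially copyable) type must never be copied by the
   middle end, hence the early false above.  */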
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
			current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}

/* If ARGS contains entries with complex types, split each such entry into
   two entries of the component type.  ARGS is modified in place.  */

static void
split_complex_args (VEC(tree, heap) **args)
{
  unsigned i;
  tree p;

  for (i = 0; VEC_iterate (tree, *args, i, p); ++i)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  VEC_replace (tree, *args, i, p);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  VEC_safe_insert (tree, heap, *args, ++i, decl);
	}
    }
}
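/* Illustrative example: given a parameter "_Complex double z" on a target
   whose split_complex_arg hook accepts its mode, the entry for z in *ARGS
   is replaced by two double PARM_DECLs holding the real and imaginary
   parts, and the rest of assign_parms treats them as two ordinary
   arguments.  */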
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  VEC(tree, heap) *fnargs = NULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg))
    VEC_safe_push (tree, heap, fnargs, arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      VEC_safe_insert (tree, heap, fnargs, 0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));
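  /* Illustrative example: a union declared with
     __attribute__ ((__transparent_union__)) whose first member is "int *"
     is passed exactly as a plain "int *" would be, so from here on
     PASSED_TYPE is that member's type.  */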
  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}

/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

#ifdef FUNCTION_INCOMING_ARG
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
				      data->passed_type, data->named_arg);
#else
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
			     data->passed_type, data->named_arg);
#endif

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
	{
	  rtx tem;
#ifdef FUNCTION_INCOMING_ARG
	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
				       data->passed_type, true);
#else
	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
			      data->passed_type, true);
#endif
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}

/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 but we need the promoted mode's size here.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm,
			GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm,
				plus_constant (MEM_OFFSET (stack_parm),
					       -offset));
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
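      /* A sketch of such a non-contiguous value (modes and register
	 numbers hypothetical): the target describes it with a PARALLEL
	 such as

	     (parallel [(expr_list (reg:DI 4) (const_int 0))
			(expr_list (reg:DI 6) (const_int 16))])

	 where each element pairs a register with the byte offset of the
	 piece it carries within the full value.  */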
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (stack_parm), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}

/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  enum machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
				build_int_cst (NULL_TREE, by),
				NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}

/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */
  if (data->nominal_mode != data->passed_mode
      || promoted_nominal_mode != data->promoted_mode)
    {
      int save_tree_used;

      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allows such misaligned
	 usage.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.  Therefore, we must first copy the parm to
	 a pseudo reg here, and save the conversion until after all
	 parameters have been moved.  */

      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validize_mem (data->entry_parm));

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
	  || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
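      /* Sketch of this step: DECL_RTL is currently a MEM whose address
	 is the incoming pointer register; below we allocate a pseudo in
	 the value's own mode, copy the pointed-to value into it, and
	 make that pseudo the DECL_RTL instead.  */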
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  enum machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else if ((set = single_set (linsn)) != 0
	       && SET_DEST (set) == parmreg)
	set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}

/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
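      /* Illustrative example (modes hypothetical): a short parm the ABI
	 promotes to full-word mode arrives extended; since its nominal
	 mode is the narrower one, we must truncate it back before storing
	 it into its stack slot below.  */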
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    plus_constant (MEM_OFFSET (data->stack_parm),
					   offset));
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}

/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      VEC(tree, heap) *fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (VEC_index (tree, fnargs, i));
	  imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
	  imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}

/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  VEC(tree, heap) *fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
						      data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      if (cfun->stdarg && !TREE_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
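      /* Illustrative note: advancing args_so_far below is what moves the
	 cursor through the argument-passing resources; e.g. on a target
	 with word-sized argument registers (hypothetical), after an int
	 parm is assigned the first register, the advance makes the next
	 parm query the second one.  */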
      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
			    data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  VEC_free (tree, heap, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  enum machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	x = addr;
      else
	{
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}
      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  crtl->args.size = MAX (crtl->args.size,
			 REG_PARM_STACK_SPACE (fndecl));
#endif

  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
					   crtl->args.size);

  /* For a stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}

/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}

/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  VEC(tree, heap) *fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
			    data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over to the
		     local copy, as its address will be taken, not the
		     PARM's.  Keep the PARM's address-taken flag as well,
		     as we'll query it during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_var (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = built_in_decls[BUILT_IN_ALLOCA];
		  t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  VEC_free (tree, heap, fnargs);

  return stmts;
}
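/* Illustrative sketch of the callee-copy transformation above: for a
   by-reference, callee-copied parameter "struct S s" of constant size,
   the emitted sequence is effectively

       s.1 = s;

   with DECL_VALUE_EXPR (s) set to s.1, so every later use of s in the
   function body refers to the local copy.  For variable-sized objects
   the copy lives in alloca'd storage addressed through a temporary
   pointer instead.  */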
*/ 3491 3492void 3493locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, 3494 int partial, tree fndecl ATTRIBUTE_UNUSED, 3495 struct args_size *initial_offset_ptr, 3496 struct locate_and_pad_arg_data *locate) 3497{ 3498 tree sizetree; 3499 enum direction where_pad; 3500 unsigned int boundary; 3501 int reg_parm_stack_space = 0; 3502 int part_size_in_regs; 3503 3504#ifdef REG_PARM_STACK_SPACE 3505 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); 3506 3507 /* If we have found a stack parm before we reach the end of the 3508 area reserved for registers, skip that area. */ 3509 if (! in_regs) 3510 { 3511 if (reg_parm_stack_space > 0) 3512 { 3513 if (initial_offset_ptr->var) 3514 { 3515 initial_offset_ptr->var 3516 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), 3517 ssize_int (reg_parm_stack_space)); 3518 initial_offset_ptr->constant = 0; 3519 } 3520 else if (initial_offset_ptr->constant < reg_parm_stack_space) 3521 initial_offset_ptr->constant = reg_parm_stack_space; 3522 } 3523 } 3524#endif /* REG_PARM_STACK_SPACE */ 3525 3526 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); 3527 3528 sizetree 3529 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); 3530 where_pad = FUNCTION_ARG_PADDING (passed_mode, type); 3531 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type); 3532 locate->where_pad = where_pad; 3533 3534 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */ 3535 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT) 3536 boundary = MAX_SUPPORTED_STACK_ALIGNMENT; 3537 3538 locate->boundary = boundary; 3539 3540 if (SUPPORTS_STACK_ALIGNMENT) 3541 { 3542 /* stack_alignment_estimated can't change after stack has been 3543 realigned. */ 3544 if (crtl->stack_alignment_estimated < boundary) 3545 { 3546 if (!crtl->stack_realign_processed) 3547 crtl->stack_alignment_estimated = boundary; 3548 else 3549 { 3550 /* If stack is realigned and stack alignment value 3551 hasn't been finalized, it is OK not to increase 3552 stack_alignment_estimated. The bigger alignment 3553 requirement is recorded in stack_alignment_needed 3554 below. */ 3555 gcc_assert (!crtl->stack_realign_finalized 3556 && crtl->stack_realign_needed); 3557 } 3558 } 3559 } 3560 3561 /* Remember if the outgoing parameter requires extra alignment on the 3562 calling function side. 
*/ 3563 if (crtl->stack_alignment_needed < boundary) 3564 crtl->stack_alignment_needed = boundary; 3565 if (crtl->preferred_stack_boundary < boundary) 3566 crtl->preferred_stack_boundary = boundary; 3567 3568#ifdef ARGS_GROW_DOWNWARD 3569 locate->slot_offset.constant = -initial_offset_ptr->constant; 3570 if (initial_offset_ptr->var) 3571 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), 3572 initial_offset_ptr->var); 3573 3574 { 3575 tree s2 = sizetree; 3576 if (where_pad != none 3577 && (!host_integerp (sizetree, 1) 3578 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY)) 3579 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT); 3580 SUB_PARM_SIZE (locate->slot_offset, s2); 3581 } 3582 3583 locate->slot_offset.constant += part_size_in_regs; 3584 3585 if (!in_regs 3586#ifdef REG_PARM_STACK_SPACE 3587 || REG_PARM_STACK_SPACE (fndecl) > 0 3588#endif 3589 ) 3590 pad_to_arg_alignment (&locate->slot_offset, boundary, 3591 &locate->alignment_pad); 3592 3593 locate->size.constant = (-initial_offset_ptr->constant 3594 - locate->slot_offset.constant); 3595 if (initial_offset_ptr->var) 3596 locate->size.var = size_binop (MINUS_EXPR, 3597 size_binop (MINUS_EXPR, 3598 ssize_int (0), 3599 initial_offset_ptr->var), 3600 locate->slot_offset.var); 3601 3602 /* Pad_below needs the pre-rounded size to know how much to pad 3603 below. */ 3604 locate->offset = locate->slot_offset; 3605 if (where_pad == downward) 3606 pad_below (&locate->offset, passed_mode, sizetree); 3607 3608#else /* !ARGS_GROW_DOWNWARD */ 3609 if (!in_regs 3610#ifdef REG_PARM_STACK_SPACE 3611 || REG_PARM_STACK_SPACE (fndecl) > 0 3612#endif 3613 ) 3614 pad_to_arg_alignment (initial_offset_ptr, boundary, 3615 &locate->alignment_pad); 3616 locate->slot_offset = *initial_offset_ptr; 3617 3618#ifdef PUSH_ROUNDING 3619 if (passed_mode != BLKmode) 3620 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); 3621#endif 3622 3623 /* Pad_below needs the pre-rounded size to know how much to pad below 3624 so this must be done before rounding up. */ 3625 locate->offset = locate->slot_offset; 3626 if (where_pad == downward) 3627 pad_below (&locate->offset, passed_mode, sizetree); 3628 3629 if (where_pad != none 3630 && (!host_integerp (sizetree, 1) 3631 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY)) 3632 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); 3633 3634 ADD_PARM_SIZE (locate->size, sizetree); 3635 3636 locate->size.constant -= part_size_in_regs; 3637#endif /* ARGS_GROW_DOWNWARD */ 3638 3639#ifdef FUNCTION_ARG_OFFSET 3640 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type); 3641#endif 3642} 3643 3644/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. 3645 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ 3646 3647static void 3648pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, 3649 struct args_size *alignment_pad) 3650{ 3651 tree save_var = NULL_TREE; 3652 HOST_WIDE_INT save_constant = 0; 3653 int boundary_in_bytes = boundary / BITS_PER_UNIT; 3654 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; 3655 3656#ifdef SPARC_STACK_BOUNDARY_HACK 3657 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than 3658 the real alignment of %sp. However, when it does this, the 3659 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. 
*/
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}

static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}


/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}

/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis but before register
   allocation, because register allocation replaces the pseudo-regs
   with hard regs.
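   As a user-level illustration (hypothetical code, not part of this
   file), given

       int i = 1;
       if (setjmp (buf))
	 return i;
       i = 2;
       g ();

   the pseudo holding i is set more than once and is live across the
   setjmp call, so -Wclobbered warns that a longjmp from g may leave i
   with an unpredictable value.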
*/ 3756 3757static void 3758setjmp_vars_warning (bitmap setjmp_crosses, tree block) 3759{ 3760 tree decl, sub; 3761 3762 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) 3763 { 3764 if (TREE_CODE (decl) == VAR_DECL 3765 && DECL_RTL_SET_P (decl) 3766 && REG_P (DECL_RTL (decl)) 3767 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) 3768 warning (OPT_Wclobbered, "variable %q+D might be clobbered by" 3769 " %<longjmp%> or %<vfork%>", decl); 3770 } 3771 3772 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub)) 3773 setjmp_vars_warning (setjmp_crosses, sub); 3774} 3775 3776/* Do the appropriate part of setjmp_vars_warning 3777 but for arguments instead of local variables. */ 3778 3779static void 3780setjmp_args_warning (bitmap setjmp_crosses) 3781{ 3782 tree decl; 3783 for (decl = DECL_ARGUMENTS (current_function_decl); 3784 decl; decl = TREE_CHAIN (decl)) 3785 if (DECL_RTL (decl) != 0 3786 && REG_P (DECL_RTL (decl)) 3787 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) 3788 warning (OPT_Wclobbered, 3789 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>", 3790 decl); 3791} 3792 3793/* Generate warning messages for variables live across setjmp. */ 3794 3795void 3796generate_setjmp_warnings (void) 3797{ 3798 bitmap setjmp_crosses = regstat_get_setjmp_crosses (); 3799 3800 if (n_basic_blocks == NUM_FIXED_BLOCKS 3801 || bitmap_empty_p (setjmp_crosses)) 3802 return; 3803 3804 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl)); 3805 setjmp_args_warning (setjmp_crosses); 3806} 3807 3808 3809/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, 3810 and create duplicate blocks. */ 3811/* ??? Need an option to either create block fragments or to create 3812 abstract origin duplicates of a source block. It really depends 3813 on what optimization has been performed. */ 3814 3815void 3816reorder_blocks (void) 3817{ 3818 tree block = DECL_INITIAL (current_function_decl); 3819 VEC(tree,heap) *block_stack; 3820 3821 if (block == NULL_TREE) 3822 return; 3823 3824 block_stack = VEC_alloc (tree, heap, 10); 3825 3826 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */ 3827 clear_block_marks (block); 3828 3829 /* Prune the old trees away, so that they don't get in the way. */ 3830 BLOCK_SUBBLOCKS (block) = NULL_TREE; 3831 BLOCK_CHAIN (block) = NULL_TREE; 3832 3833 /* Recreate the block tree from the note nesting. */ 3834 reorder_blocks_1 (get_insns (), block, &block_stack); 3835 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block)); 3836 3837 VEC_free (tree, heap, block_stack); 3838} 3839 3840/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */ 3841 3842void 3843clear_block_marks (tree block) 3844{ 3845 while (block) 3846 { 3847 TREE_ASM_WRITTEN (block) = 0; 3848 clear_block_marks (BLOCK_SUBBLOCKS (block)); 3849 block = BLOCK_CHAIN (block); 3850 } 3851} 3852 3853static void 3854reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) 3855{ 3856 rtx insn; 3857 3858 for (insn = insns; insn; insn = NEXT_INSN (insn)) 3859 { 3860 if (NOTE_P (insn)) 3861 { 3862 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG) 3863 { 3864 tree block = NOTE_BLOCK (insn); 3865 tree origin; 3866 3867 origin = (BLOCK_FRAGMENT_ORIGIN (block) 3868 ? BLOCK_FRAGMENT_ORIGIN (block) 3869 : block); 3870 3871 /* If we have seen this block before, that means it now 3872 spans multiple address regions. Create a new fragment. 
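   The new fragment is spliced in at the head of the origin's chain,
   so after the code below runs the links look like this (sketch):

       BLOCK_FRAGMENT_ORIGIN (new_block) == origin
       BLOCK_FRAGMENT_CHAIN (origin) == new_block
       BLOCK_FRAGMENT_CHAIN (new_block) == the previous head, if any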
*/ 3873 if (TREE_ASM_WRITTEN (block)) 3874 { 3875 tree new_block = copy_node (block); 3876 3877 BLOCK_FRAGMENT_ORIGIN (new_block) = origin; 3878 BLOCK_FRAGMENT_CHAIN (new_block) 3879 = BLOCK_FRAGMENT_CHAIN (origin); 3880 BLOCK_FRAGMENT_CHAIN (origin) = new_block; 3881 3882 NOTE_BLOCK (insn) = new_block; 3883 block = new_block; 3884 } 3885 3886 BLOCK_SUBBLOCKS (block) = 0; 3887 TREE_ASM_WRITTEN (block) = 1; 3888 /* When there's only one block for the entire function, 3889 current_block == block and we mustn't do this, it 3890 will cause infinite recursion. */ 3891 if (block != current_block) 3892 { 3893 if (block != origin) 3894 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block); 3895 3896 BLOCK_SUPERCONTEXT (block) = current_block; 3897 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); 3898 BLOCK_SUBBLOCKS (current_block) = block; 3899 current_block = origin; 3900 } 3901 VEC_safe_push (tree, heap, *p_block_stack, block); 3902 } 3903 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) 3904 { 3905 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack); 3906 BLOCK_SUBBLOCKS (current_block) 3907 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block)); 3908 current_block = BLOCK_SUPERCONTEXT (current_block); 3909 } 3910 } 3911 } 3912} 3913 3914/* Reverse the order of elements in the chain T of blocks, 3915 and return the new head of the chain (old last element). */ 3916 3917tree 3918blocks_nreverse (tree t) 3919{ 3920 tree prev = 0, decl, next; 3921 for (decl = t; decl; decl = next) 3922 { 3923 next = BLOCK_CHAIN (decl); 3924 BLOCK_CHAIN (decl) = prev; 3925 prev = decl; 3926 } 3927 return prev; 3928} 3929 3930/* Count the subblocks of the list starting with BLOCK. If VECTOR is 3931 non-NULL, list them all into VECTOR, in a depth-first preorder 3932 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all 3933 blocks. */ 3934 3935static int 3936all_blocks (tree block, tree *vector) 3937{ 3938 int n_blocks = 0; 3939 3940 while (block) 3941 { 3942 TREE_ASM_WRITTEN (block) = 0; 3943 3944 /* Record this block. */ 3945 if (vector) 3946 vector[n_blocks] = block; 3947 3948 ++n_blocks; 3949 3950 /* Record the subblocks, and their subblocks... */ 3951 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block), 3952 vector ? vector + n_blocks : 0); 3953 block = BLOCK_CHAIN (block); 3954 } 3955 3956 return n_blocks; 3957} 3958 3959/* Return a vector containing all the blocks rooted at BLOCK. The 3960 number of elements in the vector is stored in N_BLOCKS_P. The 3961 vector is dynamically allocated; it is the caller's responsibility 3962 to call `free' on the pointer returned. */ 3963 3964static tree * 3965get_block_vector (tree block, int *n_blocks_p) 3966{ 3967 tree *block_vector; 3968 3969 *n_blocks_p = all_blocks (block, NULL); 3970 block_vector = XNEWVEC (tree, *n_blocks_p); 3971 all_blocks (block, block_vector); 3972 3973 return block_vector; 3974} 3975 3976static GTY(()) int next_block_index = 2; 3977 3978/* Set BLOCK_NUMBER for all the blocks in FN. */ 3979 3980void 3981number_blocks (tree fn) 3982{ 3983 int i; 3984 int n_blocks; 3985 tree *block_vector; 3986 3987 /* For SDB and XCOFF debugging output, we start numbering the blocks 3988 from 1 within each function, rather than keeping a running 3989 count. 
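   For the other debug formats, next_block_index simply keeps counting
   across the translation unit; e.g. if one function's blocks were
   numbered 2 through 7, the first numbered block of the next function
   gets 8 (illustrative numbers).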
*/ 3990#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) 3991 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) 3992 next_block_index = 1; 3993#endif 3994 3995 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); 3996 3997 /* The top-level BLOCK isn't numbered at all. */ 3998 for (i = 1; i < n_blocks; ++i) 3999 /* We number the blocks from two. */ 4000 BLOCK_NUMBER (block_vector[i]) = next_block_index++; 4001 4002 free (block_vector); 4003 4004 return; 4005} 4006 4007/* If VAR is present in a subblock of BLOCK, return the subblock. */ 4008 4009tree 4010debug_find_var_in_block_tree (tree var, tree block) 4011{ 4012 tree t; 4013 4014 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t)) 4015 if (t == var) 4016 return block; 4017 4018 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) 4019 { 4020 tree ret = debug_find_var_in_block_tree (var, t); 4021 if (ret) 4022 return ret; 4023 } 4024 4025 return NULL_TREE; 4026} 4027 4028/* Keep track of whether we're in a dummy function context. If we are, 4029 we don't want to invoke the set_current_function hook, because we'll 4030 get into trouble if the hook calls target_reinit () recursively or 4031 when the initial initialization is not yet complete. */ 4032 4033static bool in_dummy_function; 4034 4035/* Invoke the target hook when setting cfun. Update the optimization options 4036 if the function uses different options than the default. */ 4037 4038static void 4039invoke_set_current_function_hook (tree fndecl) 4040{ 4041 if (!in_dummy_function) 4042 { 4043 tree opts = ((fndecl) 4044 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) 4045 : optimization_default_node); 4046 4047 if (!opts) 4048 opts = optimization_default_node; 4049 4050 /* Change optimization options if needed. */ 4051 if (optimization_current_node != opts) 4052 { 4053 optimization_current_node = opts; 4054 cl_optimization_restore (TREE_OPTIMIZATION (opts)); 4055 } 4056 4057 targetm.set_current_function (fndecl); 4058 } 4059} 4060 4061/* cfun should never be set directly; use this function. */ 4062 4063void 4064set_cfun (struct function *new_cfun) 4065{ 4066 if (cfun != new_cfun) 4067 { 4068 cfun = new_cfun; 4069 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE); 4070 } 4071} 4072 4073/* Initialized with NOGC, making this poisonous to the garbage collector. */ 4074 4075static VEC(function_p,heap) *cfun_stack; 4076 4077/* Push the current cfun onto the stack, and set cfun to new_cfun. */ 4078 4079void 4080push_cfun (struct function *new_cfun) 4081{ 4082 VEC_safe_push (function_p, heap, cfun_stack, cfun); 4083 set_cfun (new_cfun); 4084} 4085 4086/* Pop cfun from the stack. */ 4087 4088void 4089pop_cfun (void) 4090{ 4091 struct function *new_cfun = VEC_pop (function_p, cfun_stack); 4092 set_cfun (new_cfun); 4093} 4094 4095/* Return value of funcdef and increase it. */ 4096int 4097get_next_funcdef_no (void) 4098{ 4099 return funcdef_no++; 4100} 4101 4102/* Allocate a function structure for FNDECL and set its contents 4103 to the defaults. Set cfun to the newly-allocated object. 4104 Some of the helper functions invoked during initialization assume 4105 that cfun has already been set. Therefore, assign the new object 4106 directly into cfun and invoke the back end hook explicitly at the 4107 very end, rather than initializing a temporary and calling set_cfun 4108 on it. 4109 4110 ABSTRACT_P is true if this is a function that will never be seen by 4111 the middle-end. 
Such functions are front-end concepts (like C++ 4112 function templates) that do not correspond directly to functions 4113 placed in object files. */ 4114 4115void 4116allocate_struct_function (tree fndecl, bool abstract_p) 4117{ 4118 tree result; 4119 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE; 4120 4121 cfun = GGC_CNEW (struct function); 4122 4123 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL; 4124 4125 init_eh_for_function (); 4126 4127 if (init_machine_status) 4128 cfun->machine = (*init_machine_status) (); 4129 4130#ifdef OVERRIDE_ABI_FORMAT 4131 OVERRIDE_ABI_FORMAT (fndecl); 4132#endif 4133 4134 invoke_set_current_function_hook (fndecl); 4135 4136 if (fndecl != NULL_TREE) 4137 { 4138 DECL_STRUCT_FUNCTION (fndecl) = cfun; 4139 cfun->decl = fndecl; 4140 current_function_funcdef_no = get_next_funcdef_no (); 4141 4142 result = DECL_RESULT (fndecl); 4143 if (!abstract_p && aggregate_value_p (result, fndecl)) 4144 { 4145#ifdef PCC_STATIC_STRUCT_RETURN 4146 cfun->returns_pcc_struct = 1; 4147#endif 4148 cfun->returns_struct = 1; 4149 } 4150 4151 cfun->stdarg 4152 = (fntype 4153 && TYPE_ARG_TYPES (fntype) != 0 4154 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) 4155 != void_type_node)); 4156 4157 /* Assume all registers in stdarg functions need to be saved. */ 4158 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; 4159 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; 4160 } 4161} 4162 4163/* This is like allocate_struct_function, but pushes a new cfun for FNDECL 4164 instead of just setting it. */ 4165 4166void 4167push_struct_function (tree fndecl) 4168{ 4169 VEC_safe_push (function_p, heap, cfun_stack, cfun); 4170 allocate_struct_function (fndecl, false); 4171} 4172 4173/* Reset cfun, and other non-struct-function variables to defaults as 4174 appropriate for emitting rtl at the start of a function. */ 4175 4176static void 4177prepare_function_start (void) 4178{ 4179 gcc_assert (!crtl->emit.x_last_insn); 4180 init_temp_slots (); 4181 init_emit (); 4182 init_varasm_status (); 4183 init_expr (); 4184 default_rtl_profile (); 4185 4186 cse_not_expected = ! optimize; 4187 4188 /* Caller save not needed yet. */ 4189 caller_save_needed = 0; 4190 4191 /* We haven't done register allocation yet. */ 4192 reg_renumber = 0; 4193 4194 /* Indicate that we have not instantiated virtual registers yet. */ 4195 virtuals_instantiated = 0; 4196 4197 /* Indicate that we want CONCATs now. */ 4198 generating_concat_p = 1; 4199 4200 /* Indicate we have no need of a frame pointer yet. */ 4201 frame_pointer_needed = 0; 4202} 4203 4204/* Initialize the rtl expansion mechanism so that we can do simple things 4205 like generate sequences. This is used to provide a context during global 4206 initialization of some passes. You must call expand_dummy_function_end 4207 to exit this context. */ 4208 4209void 4210init_dummy_function_start (void) 4211{ 4212 gcc_assert (!in_dummy_function); 4213 in_dummy_function = true; 4214 push_struct_function (NULL_TREE); 4215 prepare_function_start (); 4216} 4217 4218/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) 4219 and initialize static variables for generating RTL for the statements 4220 of the function. 
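   A simplified sketch of how a caller is expected to drive this file
   (the real sequence lives in the expanders that call in here, so
   treat this as an outline rather than exact code):

       init_function_start (fndecl);
       expand_function_start (fndecl);
       ... emit RTL for the function body ...
       expand_function_end ();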
*/ 4221 4222void 4223init_function_start (tree subr) 4224{ 4225 if (subr && DECL_STRUCT_FUNCTION (subr)) 4226 set_cfun (DECL_STRUCT_FUNCTION (subr)); 4227 else 4228 allocate_struct_function (subr, false); 4229 prepare_function_start (); 4230 4231 /* Warn if this value is an aggregate type, 4232 regardless of which calling convention we are using for it. */ 4233 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) 4234 warning (OPT_Waggregate_return, "function returns an aggregate"); 4235} 4236 4237/* Make sure all values used by the optimization passes have sane defaults. */ 4238unsigned int 4239init_function_for_compilation (void) 4240{ 4241 reg_renumber = 0; 4242 return 0; 4243} 4244 4245struct rtl_opt_pass pass_init_function = 4246{ 4247 { 4248 RTL_PASS, 4249 "*init_function", /* name */ 4250 NULL, /* gate */ 4251 init_function_for_compilation, /* execute */ 4252 NULL, /* sub */ 4253 NULL, /* next */ 4254 0, /* static_pass_number */ 4255 TV_NONE, /* tv_id */ 4256 0, /* properties_required */ 4257 0, /* properties_provided */ 4258 0, /* properties_destroyed */ 4259 0, /* todo_flags_start */ 4260 0 /* todo_flags_finish */ 4261 } 4262}; 4263 4264 4265void 4266expand_main_function (void) 4267{ 4268#if (defined(INVOKE__main) \ 4269 || (!defined(HAS_INIT_SECTION) \ 4270 && !defined(INIT_SECTION_ASM_OP) \ 4271 && !defined(INIT_ARRAY_SECTION_ASM_OP))) 4272 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0); 4273#endif 4274} 4275 4276/* Expand code to initialize the stack_protect_guard. This is invoked at 4277 the beginning of a function to be protected. */ 4278 4279#ifndef HAVE_stack_protect_set 4280# define HAVE_stack_protect_set 0 4281# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX) 4282#endif 4283 4284void 4285stack_protect_prologue (void) 4286{ 4287 tree guard_decl = targetm.stack_protect_guard (); 4288 rtx x, y; 4289 4290 x = expand_normal (crtl->stack_protect_guard); 4291 y = expand_normal (guard_decl); 4292 4293 /* Allow the target to copy from Y to X without leaking Y into a 4294 register. */ 4295 if (HAVE_stack_protect_set) 4296 { 4297 rtx insn = gen_stack_protect_set (x, y); 4298 if (insn) 4299 { 4300 emit_insn (insn); 4301 return; 4302 } 4303 } 4304 4305 /* Otherwise do a straight move. */ 4306 emit_move_insn (x, y); 4307} 4308 4309/* Expand code to verify the stack_protect_guard. This is invoked at 4310 the end of a function to be protected. */ 4311 4312#ifndef HAVE_stack_protect_test 4313# define HAVE_stack_protect_test 0 4314# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX) 4315#endif 4316 4317void 4318stack_protect_epilogue (void) 4319{ 4320 tree guard_decl = targetm.stack_protect_guard (); 4321 rtx label = gen_label_rtx (); 4322 rtx x, y, tmp; 4323 4324 x = expand_normal (crtl->stack_protect_guard); 4325 y = expand_normal (guard_decl); 4326 4327 /* Allow the target to compare Y with X without leaking either into 4328 a register. */ 4329 switch (HAVE_stack_protect_test != 0) 4330 { 4331 case 1: 4332 tmp = gen_stack_protect_test (x, y, label); 4333 if (tmp) 4334 { 4335 emit_insn (tmp); 4336 break; 4337 } 4338 /* FALLTHRU */ 4339 4340 default: 4341 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label); 4342 break; 4343 } 4344 4345 /* The noreturn predictor has been moved to the tree level. The rtl-level 4346 predictors estimate this branch about 20%, which isn't enough to get 4347 things moved out of line. 
Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing
     anything except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.
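	     For instance (illustrative, and only on targets whose
	     function_value hook applies the usual integer promotions),
	     a C function returning `short' may get an SImode hard
	     register from hard_function_value, so the pseudo created
	     here is SImode as well, even though DECL_MODE of the
	     result is HImode.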
*/
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_unique_reg_note (insn, REG_EQUIV, chain);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately, this point
	 is before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
	expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}

/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.
*/ 4548 4549 free_after_parsing (cfun); 4550 free_after_compilation (cfun); 4551 pop_cfun (); 4552 in_dummy_function = false; 4553} 4554 4555/* Call DOIT for each hard register used as a return value from 4556 the current function. */ 4557 4558void 4559diddle_return_value (void (*doit) (rtx, void *), void *arg) 4560{ 4561 rtx outgoing = crtl->return_rtx; 4562 4563 if (! outgoing) 4564 return; 4565 4566 if (REG_P (outgoing)) 4567 (*doit) (outgoing, arg); 4568 else if (GET_CODE (outgoing) == PARALLEL) 4569 { 4570 int i; 4571 4572 for (i = 0; i < XVECLEN (outgoing, 0); i++) 4573 { 4574 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0); 4575 4576 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) 4577 (*doit) (x, arg); 4578 } 4579 } 4580} 4581 4582static void 4583do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) 4584{ 4585 emit_clobber (reg); 4586} 4587 4588void 4589clobber_return_register (void) 4590{ 4591 diddle_return_value (do_clobber_return_reg, NULL); 4592 4593 /* In case we do use pseudo to return value, clobber it too. */ 4594 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) 4595 { 4596 tree decl_result = DECL_RESULT (current_function_decl); 4597 rtx decl_rtl = DECL_RTL (decl_result); 4598 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER) 4599 { 4600 do_clobber_return_reg (decl_rtl, NULL); 4601 } 4602 } 4603} 4604 4605static void 4606do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) 4607{ 4608 emit_use (reg); 4609} 4610 4611static void 4612use_return_register (void) 4613{ 4614 diddle_return_value (do_use_return_reg, NULL); 4615} 4616 4617/* Possibly warn about unused parameters. */ 4618void 4619do_warn_unused_parameter (tree fn) 4620{ 4621 tree decl; 4622 4623 for (decl = DECL_ARGUMENTS (fn); 4624 decl; decl = TREE_CHAIN (decl)) 4625 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL 4626 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl) 4627 && !TREE_NO_WARNING (decl)) 4628 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl); 4629} 4630 4631static GTY(()) rtx initial_trampoline; 4632 4633/* Generate RTL for the end of the current function. */ 4634 4635void 4636expand_function_end (void) 4637{ 4638 rtx clobber_after; 4639 4640 /* If arg_pointer_save_area was referenced only from a nested 4641 function, we will not have initialized it yet. Do that now. */ 4642 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init) 4643 get_arg_pointer_save_area (); 4644 4645 /* If we are doing generic stack checking and this function makes calls, 4646 do a stack probe at the start of the function to ensure we have enough 4647 space for another stack frame. */ 4648 if (flag_stack_check == GENERIC_STACK_CHECK) 4649 { 4650 rtx insn, seq; 4651 4652 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 4653 if (CALL_P (insn)) 4654 { 4655 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE); 4656 start_sequence (); 4657 if (STACK_CHECK_MOVING_SP) 4658 anti_adjust_stack_and_probe (max_frame_size, true); 4659 else 4660 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); 4661 seq = get_insns (); 4662 end_sequence (); 4663 emit_insn_before (seq, stack_check_probe_note); 4664 break; 4665 } 4666 } 4667 4668 /* End any sequences that failed to be closed due to syntax errors. */ 4669 while (in_sequence_p ()) 4670 end_sequence (); 4671 4672 clear_pending_stack_adjust (); 4673 do_pending_stack_adjust (); 4674 4675 /* Output a linenumber for the end of the function. 4676 SDB depends on this. 
*/ 4677 force_next_line_note (); 4678 set_curr_insn_source_location (input_location); 4679 4680 /* Before the return label (if any), clobber the return 4681 registers so that they are not propagated live to the rest of 4682 the function. This can only happen with functions that drop 4683 through; if there had been a return statement, there would 4684 have either been a return rtx, or a jump to the return label. 4685 4686 We delay actual code generation after the current_function_value_rtx 4687 is computed. */ 4688 clobber_after = get_last_insn (); 4689 4690 /* Output the label for the actual return from the function. */ 4691 emit_label (return_label); 4692 4693 if (USING_SJLJ_EXCEPTIONS) 4694 { 4695 /* Let except.c know where it should emit the call to unregister 4696 the function context for sjlj exceptions. */ 4697 if (flag_exceptions) 4698 sjlj_emit_function_exit_after (get_last_insn ()); 4699 } 4700 else 4701 { 4702 /* We want to ensure that instructions that may trap are not 4703 moved into the epilogue by scheduling, because we don't 4704 always emit unwind information for the epilogue. */ 4705 if (flag_non_call_exceptions) 4706 emit_insn (gen_blockage ()); 4707 } 4708 4709 /* If this is an implementation of throw, do what's necessary to 4710 communicate between __builtin_eh_return and the epilogue. */ 4711 expand_eh_return (); 4712 4713 /* If scalar return value was computed in a pseudo-reg, or was a named 4714 return value that got dumped to the stack, copy that to the hard 4715 return register. */ 4716 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) 4717 { 4718 tree decl_result = DECL_RESULT (current_function_decl); 4719 rtx decl_rtl = DECL_RTL (decl_result); 4720 4721 if (REG_P (decl_rtl) 4722 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER 4723 : DECL_REGISTER (decl_result)) 4724 { 4725 rtx real_decl_rtl = crtl->return_rtx; 4726 4727 /* This should be set in assign_parms. */ 4728 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl)); 4729 4730 /* If this is a BLKmode structure being returned in registers, 4731 then use the mode computed in expand_return. Note that if 4732 decl_rtl is memory, then its mode may have been changed, 4733 but that crtl->return_rtx has not. */ 4734 if (GET_MODE (real_decl_rtl) == BLKmode) 4735 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl)); 4736 4737 /* If a non-BLKmode return value should be padded at the least 4738 significant end of the register, shift it left by the appropriate 4739 amount. BLKmode results are handled using the group load/store 4740 machinery. */ 4741 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode 4742 && targetm.calls.return_in_msb (TREE_TYPE (decl_result))) 4743 { 4744 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl), 4745 REGNO (real_decl_rtl)), 4746 decl_rtl); 4747 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl); 4748 } 4749 /* If a named return value dumped decl_return to memory, then 4750 we may need to re-do the PROMOTE_MODE signed/unsigned 4751 extension. */ 4752 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl)) 4753 { 4754 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result)); 4755 promote_function_mode (TREE_TYPE (decl_result), 4756 GET_MODE (decl_rtl), &unsignedp, 4757 TREE_TYPE (current_function_decl), 1); 4758 4759 convert_move (real_decl_rtl, decl_rtl, unsignedp); 4760 } 4761 else if (GET_CODE (real_decl_rtl) == PARALLEL) 4762 { 4763 /* If expand_function_start has created a PARALLEL for decl_rtl, 4764 move the result to the real return registers. 
Otherwise, do
	     a group load from decl_rtl for a named return.  */
	  if (GET_CODE (decl_rtl) == PARALLEL)
	    emit_group_move (real_decl_rtl, decl_rtl);
	  else
	    emit_group_load (real_decl_rtl, decl_rtl,
			     TREE_TYPE (decl_result),
			     int_size_in_bytes (TREE_TYPE (decl_result)));
	}
      /* In the case of complex integer modes smaller than a word, we'll
	 need to generate some non-trivial bitfield insertions.  Do that
	 on a pseudo and not the hard register.  */
      else if (GET_CODE (decl_rtl) == CONCAT
	       && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
	       && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	{
	  int old_generating_concat_p;
	  rtx tmp;

	  old_generating_concat_p = generating_concat_p;
	  generating_concat_p = 0;
	  tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	  generating_concat_p = old_generating_concat_p;

	  emit_move_insn (tmp, decl_rtl);
	  emit_move_insn (real_decl_rtl, tmp);
	}
      else
	emit_move_insn (real_decl_rtl, decl_rtl);
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show the return register used to hold the result (in this case
	 the address of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (!
EXIT_IGNORE_STACK 4862 && cfun->calls_alloca) 4863 { 4864 rtx tem = 0; 4865 4866 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); 4867 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); 4868 } 4869 4870 /* ??? This should no longer be necessary since stupid is no longer with 4871 us, but there are some parts of the compiler (eg reload_combine, and 4872 sh mach_dep_reorg) that still try and compute their own lifetime info 4873 instead of using the general framework. */ 4874 use_return_register (); 4875} 4876 4877rtx 4878get_arg_pointer_save_area (void) 4879{ 4880 rtx ret = arg_pointer_save_area; 4881 4882 if (! ret) 4883 { 4884 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); 4885 arg_pointer_save_area = ret; 4886 } 4887 4888 if (! crtl->arg_pointer_save_area_init) 4889 { 4890 rtx seq; 4891 4892 /* Save the arg pointer at the beginning of the function. The 4893 generated stack slot may not be a valid memory address, so we 4894 have to check it and fix it if necessary. */ 4895 start_sequence (); 4896 emit_move_insn (validize_mem (ret), 4897 crtl->args.internal_arg_pointer); 4898 seq = get_insns (); 4899 end_sequence (); 4900 4901 push_topmost_sequence (); 4902 emit_insn_after (seq, entry_of_function ()); 4903 pop_topmost_sequence (); 4904 } 4905 4906 return ret; 4907} 4908 4909/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP 4910 for the first time. */ 4911 4912static void 4913record_insns (rtx insns, rtx end, htab_t *hashp) 4914{ 4915 rtx tmp; 4916 htab_t hash = *hashp; 4917 4918 if (hash == NULL) 4919 *hashp = hash 4920 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL); 4921 4922 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp)) 4923 { 4924 void **slot = htab_find_slot (hash, tmp, INSERT); 4925 gcc_assert (*slot == NULL); 4926 *slot = tmp; 4927 } 4928} 4929 4930/* INSN has been duplicated as COPY, as part of duping a basic block. 4931 If INSN is an epilogue insn, then record COPY as epilogue as well. */ 4932 4933void 4934maybe_copy_epilogue_insn (rtx insn, rtx copy) 4935{ 4936 void **slot; 4937 4938 if (epilogue_insn_hash == NULL 4939 || htab_find (epilogue_insn_hash, insn) == NULL) 4940 return; 4941 4942 slot = htab_find_slot (epilogue_insn_hash, copy, INSERT); 4943 gcc_assert (*slot == NULL); 4944 *slot = copy; 4945} 4946 4947/* Set the locator of the insn chain starting at INSN to LOC. */ 4948static void 4949set_insn_locators (rtx insn, int loc) 4950{ 4951 while (insn != NULL_RTX) 4952 { 4953 if (INSN_P (insn)) 4954 INSN_LOCATOR (insn) = loc; 4955 insn = NEXT_INSN (insn); 4956 } 4957} 4958 4959/* Determine if any INSNs in HASH are, or are part of, INSN. Because 4960 we can be running after reorg, SEQUENCE rtl is possible. */ 4961 4962static bool 4963contains (const_rtx insn, htab_t hash) 4964{ 4965 if (hash == NULL) 4966 return false; 4967 4968 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) 4969 { 4970 int i; 4971 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) 4972 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i))) 4973 return true; 4974 return false; 4975 } 4976 4977 return htab_find (hash, insn) != NULL; 4978} 4979 4980int 4981prologue_epilogue_contains (const_rtx insn) 4982{ 4983 if (contains (insn, prologue_insn_hash)) 4984 return 1; 4985 if (contains (insn, epilogue_insn_hash)) 4986 return 1; 4987 return 0; 4988} 4989 4990#ifdef HAVE_return 4991/* Insert gen_return at the end of block BB. This also means updating 4992 block_for_insn appropriately. 
*/ 4993 4994static void 4995emit_return_into_block (basic_block bb) 4996{ 4997 emit_jump_insn_after (gen_return (), BB_END (bb)); 4998} 4999#endif /* HAVE_return */ 5000 5001/* Generate the prologue and epilogue RTL if the machine supports it. Thread 5002 this into place with notes indicating where the prologue ends and where 5003 the epilogue begins. Update the basic block information when possible. */ 5004 5005static void 5006thread_prologue_and_epilogue_insns (void) 5007{ 5008 int inserted = 0; 5009 edge e; 5010#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue) 5011 rtx seq; 5012#endif 5013#if defined (HAVE_epilogue) || defined(HAVE_return) 5014 rtx epilogue_end = NULL_RTX; 5015#endif 5016 edge_iterator ei; 5017 5018 rtl_profile_for_bb (ENTRY_BLOCK_PTR); 5019#ifdef HAVE_prologue 5020 if (HAVE_prologue) 5021 { 5022 start_sequence (); 5023 seq = gen_prologue (); 5024 emit_insn (seq); 5025 5026 /* Insert an explicit USE for the frame pointer 5027 if the profiling is on and the frame pointer is required. */ 5028 if (crtl->profile && frame_pointer_needed) 5029 emit_use (hard_frame_pointer_rtx); 5030 5031 /* Retain a map of the prologue insns. */ 5032 record_insns (seq, NULL, &prologue_insn_hash); 5033 emit_note (NOTE_INSN_PROLOGUE_END); 5034 5035#ifndef PROFILE_BEFORE_PROLOGUE 5036 /* Ensure that instructions are not moved into the prologue when 5037 profiling is on. The call to the profiling routine can be 5038 emitted within the live range of a call-clobbered register. */ 5039 if (crtl->profile) 5040 emit_insn (gen_blockage ()); 5041#endif 5042 5043 seq = get_insns (); 5044 end_sequence (); 5045 set_insn_locators (seq, prologue_locator); 5046 5047 /* Can't deal with multiple successors of the entry block 5048 at the moment. Function should always have at least one 5049 entry point. */ 5050 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR)); 5051 5052 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR)); 5053 inserted = 1; 5054 } 5055#endif 5056 5057 /* If the exit block has no non-fake predecessors, we don't need 5058 an epilogue. */ 5059 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5060 if ((e->flags & EDGE_FAKE) == 0) 5061 break; 5062 if (e == NULL) 5063 goto epilogue_done; 5064 5065 rtl_profile_for_bb (EXIT_BLOCK_PTR); 5066#ifdef HAVE_return 5067 if (optimize && HAVE_return) 5068 { 5069 /* If we're allowed to generate a simple return instruction, 5070 then by definition we don't need a full epilogue. Examine 5071 the block that falls through to EXIT. If it does not 5072 contain any code, examine its predecessors and try to 5073 emit (conditional) return instructions. */ 5074 5075 basic_block last; 5076 rtx label; 5077 5078 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5079 if (e->flags & EDGE_FALLTHRU) 5080 break; 5081 if (e == NULL) 5082 goto epilogue_done; 5083 last = e->src; 5084 5085 /* Verify that there are no active instructions in the last block. 
*/ 5086 label = BB_END (last); 5087 while (label && !LABEL_P (label)) 5088 { 5089 if (active_insn_p (label)) 5090 break; 5091 label = PREV_INSN (label); 5092 } 5093 5094 if (BB_HEAD (last) == label && LABEL_P (label)) 5095 { 5096 edge_iterator ei2; 5097 5098 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); ) 5099 { 5100 basic_block bb = e->src; 5101 rtx jump; 5102 5103 if (bb == ENTRY_BLOCK_PTR) 5104 { 5105 ei_next (&ei2); 5106 continue; 5107 } 5108 5109 jump = BB_END (bb); 5110 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label) 5111 { 5112 ei_next (&ei2); 5113 continue; 5114 } 5115 5116 /* If we have an unconditional jump, we can replace that 5117 with a simple return instruction. */ 5118 if (simplejump_p (jump)) 5119 { 5120 emit_return_into_block (bb); 5121 delete_insn (jump); 5122 } 5123 5124 /* If we have a conditional jump, we can try to replace 5125 that with a conditional return instruction. */ 5126 else if (condjump_p (jump)) 5127 { 5128 if (! redirect_jump (jump, 0, 0)) 5129 { 5130 ei_next (&ei2); 5131 continue; 5132 } 5133 5134 /* If this block has only one successor, it both jumps 5135 and falls through to the fallthru block, so we can't 5136 delete the edge. */ 5137 if (single_succ_p (bb)) 5138 { 5139 ei_next (&ei2); 5140 continue; 5141 } 5142 } 5143 else 5144 { 5145 ei_next (&ei2); 5146 continue; 5147 } 5148 5149 /* Fix up the CFG for the successful change we just made. */ 5150 redirect_edge_succ (e, EXIT_BLOCK_PTR); 5151 } 5152 5153 /* Emit a return insn for the exit fallthru block. Whether 5154 this is still reachable will be determined later. */ 5155 5156 emit_barrier_after (BB_END (last)); 5157 emit_return_into_block (last); 5158 epilogue_end = BB_END (last); 5159 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU; 5160 goto epilogue_done; 5161 } 5162 } 5163#endif 5164 5165 /* A small fib -- epilogue is not yet completed, but we wish to re-use 5166 this marker for the splits of EH_RETURN patterns, and nothing else 5167 uses the flag in the meantime. */ 5168 epilogue_completed = 1; 5169 5170#ifdef HAVE_eh_return 5171 /* Find non-fallthru edges that end with EH_RETURN instructions. On 5172 some targets, these get split to a special version of the epilogue 5173 code. In order to be able to properly annotate these with unwind 5174 info, try to split them now. If we get a valid split, drop an 5175 EPILOGUE_BEG note and mark the insns as epilogue insns. */ 5176 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5177 { 5178 rtx prev, last, trial; 5179 5180 if (e->flags & EDGE_FALLTHRU) 5181 continue; 5182 last = BB_END (e->src); 5183 if (!eh_returnjump_p (last)) 5184 continue; 5185 5186 prev = PREV_INSN (last); 5187 trial = try_split (PATTERN (last), last, 1); 5188 if (trial == last) 5189 continue; 5190 5191 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash); 5192 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev); 5193 } 5194#endif 5195 5196 /* Find the edge that falls through to EXIT. Other edges may exist 5197 due to RETURN instructions, but those don't need epilogues. 5198 There really shouldn't be a mixture -- either all should have 5199 been converted or none, however... 
*/ 5200 5201 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5202 if (e->flags & EDGE_FALLTHRU) 5203 break; 5204 if (e == NULL) 5205 goto epilogue_done; 5206 5207#ifdef HAVE_epilogue 5208 if (HAVE_epilogue) 5209 { 5210 start_sequence (); 5211 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG); 5212 seq = gen_epilogue (); 5213 emit_jump_insn (seq); 5214 5215 /* Retain a map of the epilogue insns. */ 5216 record_insns (seq, NULL, &epilogue_insn_hash); 5217 set_insn_locators (seq, epilogue_locator); 5218 5219 seq = get_insns (); 5220 end_sequence (); 5221 5222 insert_insn_on_edge (seq, e); 5223 inserted = 1; 5224 } 5225 else 5226#endif 5227 { 5228 basic_block cur_bb; 5229 5230 if (! next_active_insn (BB_END (e->src))) 5231 goto epilogue_done; 5232 /* We have a fall-through edge to the exit block, the source is not 5233 at the end of the function, and there will be an assembler epilogue 5234 at the end of the function. 5235 We can't use force_nonfallthru here, because that would try to 5236 use return. Inserting a jump 'by hand' is extremely messy, so 5237 we take advantage of cfg_layout_finalize using 5238 fixup_fallthru_exit_predecessor. */ 5239 cfg_layout_initialize (0); 5240 FOR_EACH_BB (cur_bb) 5241 if (cur_bb->index >= NUM_FIXED_BLOCKS 5242 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) 5243 cur_bb->aux = cur_bb->next_bb; 5244 cfg_layout_finalize (); 5245 } 5246epilogue_done: 5247 default_rtl_profile (); 5248 5249 if (inserted) 5250 { 5251 commit_edge_insertions (); 5252 5253 /* The epilogue insns we inserted may cause the exit edge to no longer 5254 be fallthru. */ 5255 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5256 { 5257 if (((e->flags & EDGE_FALLTHRU) != 0) 5258 && returnjump_p (BB_END (e->src))) 5259 e->flags &= ~EDGE_FALLTHRU; 5260 } 5261 } 5262 5263#ifdef HAVE_sibcall_epilogue 5264 /* Emit sibling epilogues before any sibling call sites. */ 5265 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); ) 5266 { 5267 basic_block bb = e->src; 5268 rtx insn = BB_END (bb); 5269 5270 if (!CALL_P (insn) 5271 || ! SIBLING_CALL_P (insn)) 5272 { 5273 ei_next (&ei); 5274 continue; 5275 } 5276 5277 start_sequence (); 5278 emit_note (NOTE_INSN_EPILOGUE_BEG); 5279 emit_insn (gen_sibcall_epilogue ()); 5280 seq = get_insns (); 5281 end_sequence (); 5282 5283 /* Retain a map of the epilogue insns. Used in life analysis to 5284 avoid getting rid of sibcall epilogue insns. Do this before we 5285 actually emit the sequence. */ 5286 record_insns (seq, NULL, &epilogue_insn_hash); 5287 set_insn_locators (seq, epilogue_locator); 5288 5289 emit_insn_before (seq, insn); 5290 ei_next (&ei); 5291 } 5292#endif 5293 5294#ifdef HAVE_epilogue 5295 if (epilogue_end) 5296 { 5297 rtx insn, next; 5298 5299 /* Similarly, move any line notes that appear after the epilogue. 5300 There is no need, however, to be quite so anal about the existence 5301 of such a note. Also possibly move 5302 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug 5303 info generation. */ 5304 for (insn = epilogue_end; insn; insn = next) 5305 { 5306 next = NEXT_INSN (insn); 5307 if (NOTE_P (insn) 5308 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) 5309 reorder_insns (insn, insn, PREV_INSN (epilogue_end)); 5310 } 5311 } 5312#endif 5313 5314 /* Threading the prologue and epilogue changes the artificial refs 5315 in the entry and exit blocks. */ 5316 epilogue_completed = 1; 5317 df_update_entry_exit_and_calls (); 5318} 5319 5320/* Reposition the prologue-end and epilogue-begin notes after 5321 instruction scheduling. 
*/ 5322 5323void 5324reposition_prologue_and_epilogue_notes (void) 5325{ 5326#if defined (HAVE_prologue) || defined (HAVE_epilogue) \ 5327 || defined (HAVE_sibcall_epilogue) 5328 /* Since the hash table is created on demand, the fact that it is 5329 non-null is a signal that it is non-empty. */ 5330 if (prologue_insn_hash != NULL) 5331 { 5332 size_t len = htab_elements (prologue_insn_hash); 5333 rtx insn, last = NULL, note = NULL; 5334 5335 /* Scan from the beginning until we reach the last prologue insn. */ 5336 /* ??? While we do have the CFG intact, there are two problems: 5337 (1) The prologue can contain loops (typically probing the stack), 5338 which means that the end of the prologue isn't in the first bb. 5339 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */ 5340 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 5341 { 5342 if (NOTE_P (insn)) 5343 { 5344 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END) 5345 note = insn; 5346 } 5347 else if (contains (insn, prologue_insn_hash)) 5348 { 5349 last = insn; 5350 if (--len == 0) 5351 break; 5352 } 5353 } 5354 5355 if (last) 5356 { 5357 if (note == NULL) 5358 { 5359 /* Scan forward looking for the PROLOGUE_END note. It should 5360 be right at the beginning of the block, possibly with other 5361 insn notes that got moved there. */ 5362 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note)) 5363 { 5364 if (NOTE_P (note) 5365 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END) 5366 break; 5367 } 5368 } 5369 5370 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */ 5371 if (LABEL_P (last)) 5372 last = NEXT_INSN (last); 5373 reorder_insns (note, note, last); 5374 } 5375 } 5376 5377 if (epilogue_insn_hash != NULL) 5378 { 5379 edge_iterator ei; 5380 edge e; 5381 5382 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 5383 { 5384 rtx insn, first = NULL, note = NULL; 5385 basic_block bb = e->src; 5386 5387 /* Scan from the beginning until we reach the first epilogue insn. */ 5388 FOR_BB_INSNS (bb, insn) 5389 { 5390 if (NOTE_P (insn)) 5391 { 5392 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) 5393 { 5394 note = insn; 5395 if (first != NULL) 5396 break; 5397 } 5398 } 5399 else if (first == NULL && contains (insn, epilogue_insn_hash)) 5400 { 5401 first = insn; 5402 if (note != NULL) 5403 break; 5404 } 5405 } 5406 5407 if (note) 5408 { 5409 /* If the function has a single basic block, and no real 5410 epilogue insns (e.g. sibcall with no cleanup), the 5411 epilogue note can get scheduled before the prologue 5412 note. If we have frame related prologue insns, having 5413 them scanned during the epilogue will result in a crash. 5414 In this case re-order the epilogue note to just before 5415 the last insn in the block. */ 5416 if (first == NULL) 5417 first = BB_END (bb); 5418 5419 if (PREV_INSN (first) != note) 5420 reorder_insns (note, note, PREV_INSN (first)); 5421 } 5422 } 5423 } 5424#endif /* HAVE_prologue or HAVE_epilogue */ 5425} 5426 5427/* Returns the name of the current function. */ 5428const char * 5429current_function_name (void) 5430{ 5431 if (cfun == NULL) 5432 return "<none>"; 5433 return lang_hooks.decl_printable_name (cfun->decl, 2); 5434} 5435 5436 5437static unsigned int 5438rest_of_handle_check_leaf_regs (void) 5439{ 5440#ifdef LEAF_REGISTERS 5441 current_function_uses_only_leaf_regs 5442 = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); 5443#endif 5444 return 0; 5445} 5446 5447/* Insert a TYPE into the used types hash table of CFUN. 
*/ 5448 5449static void 5450used_types_insert_helper (tree type, struct function *func) 5451{ 5452 if (type != NULL && func != NULL) 5453 { 5454 void **slot; 5455 5456 if (func->used_types_hash == NULL) 5457 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer, 5458 htab_eq_pointer, NULL); 5459 slot = htab_find_slot (func->used_types_hash, type, INSERT); 5460 if (*slot == NULL) 5461 *slot = type; 5462 } 5463} 5464 5465/* Given a type, insert it into the used hash table in cfun. */ 5466void 5467used_types_insert (tree t) 5468{ 5469 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) 5470 if (TYPE_NAME (t)) 5471 break; 5472 else 5473 t = TREE_TYPE (t); 5474 if (TYPE_NAME (t) == NULL_TREE 5475 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t))) 5476 t = TYPE_MAIN_VARIANT (t); 5477 if (debug_info_level > DINFO_LEVEL_NONE) 5478 { 5479 if (cfun) 5480 used_types_insert_helper (t, cfun); 5481 else 5482 /* So this might be a type referenced by a global variable. 5483 Record that type so that we can later decide to emit its debug 5484 information. */ 5485 types_used_by_cur_var_decl = 5486 tree_cons (t, NULL, types_used_by_cur_var_decl); 5487 5488 } 5489} 5490 5491/* Helper to Hash a struct types_used_by_vars_entry. */ 5492 5493static hashval_t 5494hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry) 5495{ 5496 gcc_assert (entry && entry->var_decl && entry->type); 5497 5498 return iterative_hash_object (entry->type, 5499 iterative_hash_object (entry->var_decl, 0)); 5500} 5501 5502/* Hash function of the types_used_by_vars_entry hash table. */ 5503 5504hashval_t 5505types_used_by_vars_do_hash (const void *x) 5506{ 5507 const struct types_used_by_vars_entry *entry = 5508 (const struct types_used_by_vars_entry *) x; 5509 5510 return hash_types_used_by_vars_entry (entry); 5511} 5512 5513/*Equality function of the types_used_by_vars_entry hash table. */ 5514 5515int 5516types_used_by_vars_eq (const void *x1, const void *x2) 5517{ 5518 const struct types_used_by_vars_entry *e1 = 5519 (const struct types_used_by_vars_entry *) x1; 5520 const struct types_used_by_vars_entry *e2 = 5521 (const struct types_used_by_vars_entry *)x2; 5522 5523 return (e1->var_decl == e2->var_decl && e1->type == e2->type); 5524} 5525 5526/* Inserts an entry into the types_used_by_vars_hash hash table. 
*/ 5527 5528void 5529types_used_by_var_decl_insert (tree type, tree var_decl) 5530{ 5531 if (type != NULL && var_decl != NULL) 5532 { 5533 void **slot; 5534 struct types_used_by_vars_entry e; 5535 e.var_decl = var_decl; 5536 e.type = type; 5537 if (types_used_by_vars_hash == NULL) 5538 types_used_by_vars_hash = 5539 htab_create_ggc (37, types_used_by_vars_do_hash, 5540 types_used_by_vars_eq, NULL); 5541 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e, 5542 hash_types_used_by_vars_entry (&e), INSERT); 5543 if (*slot == NULL) 5544 { 5545 struct types_used_by_vars_entry *entry; 5546 entry = (struct types_used_by_vars_entry*) ggc_alloc 5547 (sizeof (struct types_used_by_vars_entry)); 5548 entry->type = type; 5549 entry->var_decl = var_decl; 5550 *slot = entry; 5551 } 5552 } 5553} 5554 5555struct rtl_opt_pass pass_leaf_regs = 5556{ 5557 { 5558 RTL_PASS, 5559 "*leaf_regs", /* name */ 5560 NULL, /* gate */ 5561 rest_of_handle_check_leaf_regs, /* execute */ 5562 NULL, /* sub */ 5563 NULL, /* next */ 5564 0, /* static_pass_number */ 5565 TV_NONE, /* tv_id */ 5566 0, /* properties_required */ 5567 0, /* properties_provided */ 5568 0, /* properties_destroyed */ 5569 0, /* todo_flags_start */ 5570 0 /* todo_flags_finish */ 5571 } 5572}; 5573 5574static unsigned int 5575rest_of_handle_thread_prologue_and_epilogue (void) 5576{ 5577 if (optimize) 5578 cleanup_cfg (CLEANUP_EXPENSIVE); 5579 /* On some machines, the prologue and epilogue code, or parts thereof, 5580 can be represented as RTL. Doing so lets us schedule insns between 5581 it and the rest of the code and also allows delayed branch 5582 scheduling to operate in the epilogue. */ 5583 5584 thread_prologue_and_epilogue_insns (); 5585 return 0; 5586} 5587 5588struct rtl_opt_pass pass_thread_prologue_and_epilogue = 5589{ 5590 { 5591 RTL_PASS, 5592 "pro_and_epilogue", /* name */ 5593 NULL, /* gate */ 5594 rest_of_handle_thread_prologue_and_epilogue, /* execute */ 5595 NULL, /* sub */ 5596 NULL, /* next */ 5597 0, /* static_pass_number */ 5598 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */ 5599 0, /* properties_required */ 5600 0, /* properties_provided */ 5601 0, /* properties_destroyed */ 5602 TODO_verify_flow, /* todo_flags_start */ 5603 TODO_dump_func | 5604 TODO_df_verify | 5605 TODO_df_finish | TODO_verify_rtl_sharing | 5606 TODO_ggc_collect /* todo_flags_finish */ 5607 } 5608}; 5609 5610 5611/* This mini-pass fixes fall-out from SSA in asm statements that have 5612 in-out constraints. Say you start with 5613 5614 orig = inout; 5615 asm ("": "+mr" (inout)); 5616 use (orig); 5617 5618 which is transformed very early to use explicit output and match operands: 5619 5620 orig = inout; 5621 asm ("": "=mr" (inout) : "0" (inout)); 5622 use (orig); 5623 5624 Or, after SSA and copyprop, 5625 5626 asm ("": "=mr" (inout_2) : "0" (inout_1)); 5627 use (inout_1); 5628 5629 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as 5630 they represent two separate values, so they will get different pseudo 5631 registers during expansion. Then, since the two operands need to match 5632 per the constraints, but use different pseudo registers, reload can 5633 only register a reload for these operands. But reloads can only be 5634 satisfied by hardregs, not by memory, so we need a register for this 5635 reload, just because we are presented with non-matching operands. 5636 So, even though we allow memory for this operand, no memory can be 5637 used for it, just because the two operands don't match. 

struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",                         /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();
  return 0;
}

struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_verify |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};


/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copy propagation,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hard regs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For

           asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));

         only change it once (to out1), rather than changing it first
         to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         too complicated a problem for reload to solve.  E.g. this
         situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we replaced only the occurrence in the input operand (to make
         it match), we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the
         same value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
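
/* Editorial illustration (hypothetical user code; `use' is a placeholder
   function, not anything in this file): an input such as

     copy = x;
     asm ("" : "+mr" (x));
     use (copy);

   reaches this pass, after SSA and copy propagation, with the asm's
   matched input and output held in different pseudos, exactly the
   situation described in the comment before match_asm_constraints_1.
   The pass re-establishes the match by emitting a move from the input
   pseudo to the output pseudo immediately before the asm.  */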
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


#include "gt-function.h"