expr.c revision 220150
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

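/* Illustrative example (editor's note, not part of the original source):
   for a copy of LEN == 6 bytes on a hypothetical 32-bit target with
   MOVE_MAX_PIECES == 4 and word-aligned operands, a move_by_pieces
   descriptor drives one SImode move followed by one HImode move,
   with OFFSET (or the auto-increment addresses) updated after each.  */
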
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

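/* Illustrative example (editor's note, assumed numbers): on a target
   where MOVE_MAX_PIECES is 8 and MOVE_RATIO is 15, a 32-byte copy of
   8-byte-aligned operands (ALIGN == 64 bits) costs four DImode moves,
   so MOVE_BY_PIECES_P (32, 64) is true and the copy is expanded inline
   rather than routed through memcpy.  */
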
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);
      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

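/* Illustrative note (editor's note, not part of the original source):
   the probe above asks recog whether, e.g.,
   (set (reg:SF <regno>) (mem:SF (reg:Pmode sp))) matches some move
   pattern; if it does for any hard register, direct_load for SFmode is
   set and SFmode fields may be accessed in memory directly.  */
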
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

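/* Illustrative usage (editor's note, not part of the original source):
   to widen a SImode pseudo SRC into a DImode pseudo with sign
   extension, a caller would write

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   with UNSIGNEDP == 0 requesting sign extension; convert_move then
   picks a direct extend insn, an intermediate mode, or the shift-based
   fallback above, whichever the target supports.  */
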
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

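/* Illustrative example (editor's note, not part of the original source):
   extending the QImode constant -1 to HImode at compile time,

     rtx c = convert_modes (HImode, QImode, GEN_INT (-1), 0);

   folds to the HImode constant -1 without emitting any insns, whereas
   UNSIGNEDP == 1 would yield 255 via the masking logic above.  */
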
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
   mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

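/* Worked example (editor's note, assumed numbers): with L == 11, full
   alignment, and MOVE_MAX_PIECES == 8, the loop above counts
   11 / 8 = 1 DImode move (3 bytes left), 3 / 4 = 0 SImode moves,
   3 / 2 = 1 HImode move (1 byte left), and 1 QImode move: three insns
   in total.  */
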
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

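/* Illustrative usage (editor's note, not part of the original source):
   copying an N-byte aggregate between two BLKmode MEMs might look like

     emit_block_move (dst_mem, src_mem, GEN_INT (n), BLOCK_OP_NORMAL);

   which tries move_by_pieces, then a movmem pattern, and finally falls
   back to a memcpy libcall, in that order.  */
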
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

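/* Illustrative note (editor's note, not part of the original source):
   for SImode the loop above tries the target's "movmemsi" named
   pattern, passing the two BLKmode MEMs, the size converted to SImode,
   and the shared alignment in bytes as operands 0..3; a target that
   cannot handle the request returns a null pattern and the next wider
   mode is tried.  */
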
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

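/* Illustrative note (editor's note, not part of the original source):
   an environment that renames the block-copy routine could call, e.g.,
   init_block_move_fn ("__my_memcpy") (hypothetical name), so the decl
   built above is referenced under that assembler name instead of
   "memcpy".  */
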
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

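/* Illustrative usage (editor's note, not part of the original source):
   to load a DImode value X into the two consecutive word registers
   starting at hard register 4 on a 32-bit target, a caller would write

     move_block_to_reg (4, x, 2, DImode);

   which uses a load-multiple insn when the target provides one and
   falls back to per-word moves otherwise.  */
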
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}

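/* Illustrative example (editor's note, not part of the original
   source): an ABI that returns a 16-byte struct in two registers might
   describe the destination as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   so that emit_group_load (that_parallel, src_mem, type, 16) extracts
   the two DImode pieces at byte offsets 0 and 8 into those registers.  */
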
*/ 1831 1832rtx 1833emit_group_move_into_temps (rtx src) 1834{ 1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0)); 1836 int i; 1837 1838 for (i = 0; i < XVECLEN (src, 0); i++) 1839 { 1840 rtx e = XVECEXP (src, 0, i); 1841 rtx d = XEXP (e, 0); 1842 1843 if (d) 1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1)); 1845 RTVEC_ELT (vec, i) = e; 1846 } 1847 1848 return gen_rtx_PARALLEL (GET_MODE (src), vec); 1849} 1850 1851/* Emit code to move a block SRC to a block ORIG_DST of type TYPE, 1852 where SRC is non-consecutive registers represented by a PARALLEL. 1853 SSIZE represents the total size of block ORIG_DST, or -1 if not 1854 known. */ 1855 1856void 1857emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) 1858{ 1859 rtx *tmps, dst; 1860 int start, finish, i; 1861 enum machine_mode m = GET_MODE (orig_dst); 1862 1863 gcc_assert (GET_CODE (src) == PARALLEL); 1864 1865 if (!SCALAR_INT_MODE_P (m) 1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT) 1867 { 1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst)); 1869 if (imode == BLKmode) 1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0); 1871 else 1872 dst = gen_reg_rtx (imode); 1873 emit_group_store (dst, src, type, ssize); 1874 if (imode != BLKmode) 1875 dst = gen_lowpart (GET_MODE (orig_dst), dst); 1876 emit_move_insn (orig_dst, dst); 1877 return; 1878 } 1879 1880 /* Check for a NULL entry, used to indicate that the parameter goes 1881 both on the stack and in registers. */ 1882 if (XEXP (XVECEXP (src, 0, 0), 0)) 1883 start = 0; 1884 else 1885 start = 1; 1886 finish = XVECLEN (src, 0); 1887 1888 tmps = alloca (sizeof (rtx) * finish); 1889 1890 /* Copy the (probable) hard regs into pseudos. */ 1891 for (i = start; i < finish; i++) 1892 { 1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0); 1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER) 1895 { 1896 tmps[i] = gen_reg_rtx (GET_MODE (reg)); 1897 emit_move_insn (tmps[i], reg); 1898 } 1899 else 1900 tmps[i] = reg; 1901 } 1902 1903 /* If we won't be storing directly into memory, protect the real destination 1904 from strange tricks we might play. */ 1905 dst = orig_dst; 1906 if (GET_CODE (dst) == PARALLEL) 1907 { 1908 rtx temp; 1909 1910 /* We can get a PARALLEL dst if there is a conditional expression in 1911 a return statement. In that case, the dst and src are the same, 1912 so no action is necessary. */ 1913 if (rtx_equal_p (dst, src)) 1914 return; 1915 1916 /* It is unclear if we can ever reach here, but we may as well handle 1917 it. Allocate a temporary, and split this into a store/load to/from 1918 the temporary. */ 1919 1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0); 1921 emit_group_store (temp, src, type, ssize); 1922 emit_group_load (dst, temp, type, ssize); 1923 return; 1924 } 1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) 1926 { 1927 enum machine_mode outer = GET_MODE (dst); 1928 enum machine_mode inner; 1929 HOST_WIDE_INT bytepos; 1930 bool done = false; 1931 rtx temp; 1932 1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER) 1934 dst = gen_reg_rtx (outer); 1935 1936 /* Make life a bit easier for combine. */ 1937 /* If the first element of the vector is the low part 1938 of the destination mode, use a paradoxical subreg to 1939 initialize the destination. 
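   As an illustration (not from the original source), on a hypothetical
   little-endian 64-bit target, when tmps[start] is an SImode piece
   destined for byte 0 of a DImode destination, the code below can emit

     (set (reg:DI dst) (subreg:DI (reg:SI piece) 0))

   defining the whole destination in one move, rather than inserting a
   bit field into an uninitialized register.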
*/ 1940 if (start < finish) 1941 { 1942 inner = GET_MODE (tmps[start]); 1943 bytepos = subreg_lowpart_offset (inner, outer); 1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos) 1945 { 1946 temp = simplify_gen_subreg (outer, tmps[start], 1947 inner, 0); 1948 if (temp) 1949 { 1950 emit_move_insn (dst, temp); 1951 done = true; 1952 start++; 1953 } 1954 } 1955 } 1956 1957 /* If the first element wasn't the low part, try the last. */ 1958 if (!done 1959 && start < finish - 1) 1960 { 1961 inner = GET_MODE (tmps[finish - 1]); 1962 bytepos = subreg_lowpart_offset (inner, outer); 1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos) 1964 { 1965 temp = simplify_gen_subreg (outer, tmps[finish - 1], 1966 inner, 0); 1967 if (temp) 1968 { 1969 emit_move_insn (dst, temp); 1970 done = true; 1971 finish--; 1972 } 1973 } 1974 } 1975 1976 /* Otherwise, simply initialize the result to zero. */ 1977 if (!done) 1978 emit_move_insn (dst, CONST0_RTX (outer)); 1979 } 1980 1981 /* Process the pieces. */ 1982 for (i = start; i < finish; i++) 1983 { 1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); 1985 enum machine_mode mode = GET_MODE (tmps[i]); 1986 unsigned int bytelen = GET_MODE_SIZE (mode); 1987 rtx dest = dst; 1988 1989 /* Handle trailing fragments that run over the size of the struct. */ 1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) 1991 { 1992 /* store_bit_field always takes its value from the lsb. 1993 Move the fragment to the lsb if it's not already there. */ 1994 if ( 1995#ifdef BLOCK_REG_PADDING 1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start) 1997 == (BYTES_BIG_ENDIAN ? upward : downward) 1998#else 1999 BYTES_BIG_ENDIAN 2000#endif 2001 ) 2002 { 2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; 2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i], 2005 build_int_cst (NULL_TREE, shift), 2006 tmps[i], 0); 2007 } 2008 bytelen = ssize - bytepos; 2009 } 2010 2011 if (GET_CODE (dst) == CONCAT) 2012 { 2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) 2014 dest = XEXP (dst, 0); 2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) 2016 { 2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); 2018 dest = XEXP (dst, 1); 2019 } 2020 else 2021 { 2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0)); 2023 dest = assign_stack_temp (GET_MODE (dest), 2024 GET_MODE_SIZE (GET_MODE (dest)), 0); 2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos), 2026 tmps[i]); 2027 dst = dest; 2028 break; 2029 } 2030 } 2031 2032 /* Optimize the access just a bit. */ 2033 if (MEM_P (dest) 2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest)) 2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) 2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 2037 && bytelen == GET_MODE_SIZE (mode)) 2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); 2039 else 2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, 2041 mode, tmps[i]); 2042 } 2043 2044 /* Copy from the pseudo into the (probable) hard reg. */ 2045 if (orig_dst != dst) 2046 emit_move_insn (orig_dst, dst); 2047} 2048 2049/* Generate code to copy a BLKmode object of TYPE out of a 2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK 2051 is null, a stack temporary is created. TGTBLK is returned. 2052 2053 The purpose of this routine is to handle functions that return 2054 BLKmode structures in registers. 
Some machines (the PA for example)
2055    want to return all small structures in registers regardless of the
2056    structure's alignment.  */
2057
2058 rtx
2059 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2060 {
2061   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062   rtx src = NULL, dst = NULL;
2063   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2065
2066   if (tgtblk == 0)
2067     {
2068       tgtblk = assign_temp (build_qualified_type (type,
2069                                                   (TYPE_QUALS (type)
2070                                                    | TYPE_QUAL_CONST)),
2071                             0, 1, 1);
2072       preserve_temp_slots (tgtblk);
2073     }
2074
2075   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2076      into a new pseudo which is a full word.  */
2077
2078   if (GET_MODE (srcreg) != BLKmode
2079       && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080     srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2081
2082   /* If the structure doesn't take up a whole number of words, see whether
2083      SRCREG is padded on the left or on the right.  If it's on the left,
2084      set PADDING_CORRECTION to the number of bits to skip.
2085
2086      In most ABIs, the structure will be returned at the least significant
2087      end of the register, which translates to right padding on little-endian
2088      targets and left padding on big-endian targets.  The opposite
2089      holds if the structure is returned at the most significant
2090      end of the register.  */
2091   if (bytes % UNITS_PER_WORD != 0
2092       && (targetm.calls.return_in_msb (type)
2093           ? !BYTES_BIG_ENDIAN
2094           : BYTES_BIG_ENDIAN))
2095     padding_correction
2096       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2097
2098   /* Copy the structure BITSIZE bits at a time.
2099
2100      We could probably emit more efficient code for machines which do not use
2101      strict alignment, but it doesn't seem worth the effort at the current
2102      time.  */
2103   for (bitpos = 0, xbitpos = padding_correction;
2104        bitpos < bytes * BITS_PER_UNIT;
2105        bitpos += bitsize, xbitpos += bitsize)
2106     {
2107       /* We need a new source operand each time xbitpos is on a
2108          word boundary and when xbitpos == padding_correction
2109          (the first time through).  */
2110       if (xbitpos % BITS_PER_WORD == 0
2111           || xbitpos == padding_correction)
2112         src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113                                      GET_MODE (srcreg));
2114
2115       /* We need a new destination operand each time bitpos is on
2116          a word boundary.  */
2117       if (bitpos % BITS_PER_WORD == 0)
2118         dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2119
2120       /* Use xbitpos for the source extraction (right justified) and
2121          bitpos for the destination store (left justified).  */
2122       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123                        extract_bit_field (src, bitsize,
2124                                           xbitpos % BITS_PER_WORD, 1,
2125                                           NULL_RTX, word_mode, word_mode));
2126     }
2127
2128   return tgtblk;
2129 }
2130
2131 /* Add a USE expression for REG to the (possibly empty) list pointed
2132    to by CALL_FUSAGE.  REG must denote a hard register.  */
2133
2134 void
2135 use_reg (rtx *call_fusage, rtx reg)
2136 {
2137   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138
2139   *call_fusage
2140     = gen_rtx_EXPR_LIST (VOIDmode,
2141                          gen_rtx_USE (VOIDmode, reg), *call_fusage);
2142 }
2143
2144 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145    starting at REGNO.  All of these registers must be hard registers.
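   An illustrative use (register numbers hypothetical):

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   records (use (reg 4)) and (use (reg 5)) on the list, so later passes
   know a call reads those argument registers.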
*/ 2146 2147void 2148use_regs (rtx *call_fusage, int regno, int nregs) 2149{ 2150 int i; 2151 2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER); 2153 2154 for (i = 0; i < nregs; i++) 2155 use_reg (call_fusage, regno_reg_rtx[regno + i]); 2156} 2157 2158/* Add USE expressions to *CALL_FUSAGE for each REG contained in the 2159 PARALLEL REGS. This is for calls that pass values in multiple 2160 non-contiguous locations. The Irix 6 ABI has examples of this. */ 2161 2162void 2163use_group_regs (rtx *call_fusage, rtx regs) 2164{ 2165 int i; 2166 2167 for (i = 0; i < XVECLEN (regs, 0); i++) 2168 { 2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0); 2170 2171 /* A NULL entry means the parameter goes both on the stack and in 2172 registers. This can also be a MEM for targets that pass values 2173 partially on the stack and partially in registers. */ 2174 if (reg != 0 && REG_P (reg)) 2175 use_reg (call_fusage, reg); 2176 } 2177} 2178 2179 2180/* Determine whether the LEN bytes generated by CONSTFUN can be 2181 stored to memory using several move instructions. CONSTFUNDATA is 2182 a pointer which will be passed as argument in every CONSTFUN call. 2183 ALIGN is maximum alignment we can assume. Return nonzero if a 2184 call to store_by_pieces should succeed. */ 2185 2186int 2187can_store_by_pieces (unsigned HOST_WIDE_INT len, 2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), 2189 void *constfundata, unsigned int align) 2190{ 2191 unsigned HOST_WIDE_INT l; 2192 unsigned int max_size; 2193 HOST_WIDE_INT offset = 0; 2194 enum machine_mode mode, tmode; 2195 enum insn_code icode; 2196 int reverse; 2197 rtx cst; 2198 2199 if (len == 0) 2200 return 1; 2201 2202 if (! STORE_BY_PIECES_P (len, align)) 2203 return 0; 2204 2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); 2206 if (align >= GET_MODE_ALIGNMENT (tmode)) 2207 align = GET_MODE_ALIGNMENT (tmode); 2208 else 2209 { 2210 enum machine_mode xmode; 2211 2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; 2213 tmode != VOIDmode; 2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) 2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES 2216 || SLOW_UNALIGNED_ACCESS (tmode, align)) 2217 break; 2218 2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode)); 2220 } 2221 2222 /* We would first store what we can in the largest integer mode, then go to 2223 successively smaller modes. */ 2224 2225 for (reverse = 0; 2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); 2227 reverse++) 2228 { 2229 l = len; 2230 mode = VOIDmode; 2231 max_size = STORE_MAX_PIECES + 1; 2232 while (max_size > 1) 2233 { 2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2236 if (GET_MODE_SIZE (tmode) < max_size) 2237 mode = tmode; 2238 2239 if (mode == VOIDmode) 2240 break; 2241 2242 icode = mov_optab->handlers[(int) mode].insn_code; 2243 if (icode != CODE_FOR_nothing 2244 && align >= GET_MODE_ALIGNMENT (mode)) 2245 { 2246 unsigned int size = GET_MODE_SIZE (mode); 2247 2248 while (l >= size) 2249 { 2250 if (reverse) 2251 offset -= size; 2252 2253 cst = (*constfun) (constfundata, offset, mode); 2254 if (!LEGITIMATE_CONSTANT_P (cst)) 2255 return 0; 2256 2257 if (!reverse) 2258 offset += size; 2259 2260 l -= size; 2261 } 2262 } 2263 2264 max_size = GET_MODE_SIZE (mode); 2265 } 2266 2267 /* The code above should have handled everything. 
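   (The narrowest integer mode is a single byte wide and a QImode move
   is available on every practical target, so the loop can always finish
   off the residue; hence the assertion below.)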
*/ 2268 gcc_assert (!l); 2269 } 2270 2271 return 1; 2272} 2273 2274/* Generate several move instructions to store LEN bytes generated by 2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a 2276 pointer which will be passed as argument in every CONSTFUN call. 2277 ALIGN is maximum alignment we can assume. 2278 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala 2279 mempcpy, and if ENDP is 2 return memory the end minus one byte ala 2280 stpcpy. */ 2281 2282rtx 2283store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, 2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), 2285 void *constfundata, unsigned int align, int endp) 2286{ 2287 struct store_by_pieces data; 2288 2289 if (len == 0) 2290 { 2291 gcc_assert (endp != 2); 2292 return to; 2293 } 2294 2295 gcc_assert (STORE_BY_PIECES_P (len, align)); 2296 data.constfun = constfun; 2297 data.constfundata = constfundata; 2298 data.len = len; 2299 data.to = to; 2300 store_by_pieces_1 (&data, align); 2301 if (endp) 2302 { 2303 rtx to1; 2304 2305 gcc_assert (!data.reverse); 2306 if (data.autinc_to) 2307 { 2308 if (endp == 2) 2309 { 2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0) 2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx)); 2312 else 2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr, 2314 -1)); 2315 } 2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr, 2317 data.offset); 2318 } 2319 else 2320 { 2321 if (endp == 2) 2322 --data.offset; 2323 to1 = adjust_address (data.to, QImode, data.offset); 2324 } 2325 return to1; 2326 } 2327 else 2328 return data.to; 2329} 2330 2331/* Generate several move instructions to clear LEN bytes of block TO. (A MEM 2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */ 2333 2334static void 2335clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align) 2336{ 2337 struct store_by_pieces data; 2338 2339 if (len == 0) 2340 return; 2341 2342 data.constfun = clear_by_pieces_1; 2343 data.constfundata = NULL; 2344 data.len = len; 2345 data.to = to; 2346 store_by_pieces_1 (&data, align); 2347} 2348 2349/* Callback routine for clear_by_pieces. 2350 Return const0_rtx unconditionally. */ 2351 2352static rtx 2353clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED, 2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED, 2355 enum machine_mode mode ATTRIBUTE_UNUSED) 2356{ 2357 return const0_rtx; 2358} 2359 2360/* Subroutine of clear_by_pieces and store_by_pieces. 2361 Generate several move instructions to store LEN bytes of block TO. (A MEM 2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */ 2363 2364static void 2365store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, 2366 unsigned int align ATTRIBUTE_UNUSED) 2367{ 2368 rtx to_addr = XEXP (data->to, 0); 2369 unsigned int max_size = STORE_MAX_PIECES + 1; 2370 enum machine_mode mode = VOIDmode, tmode; 2371 enum insn_code icode; 2372 2373 data->offset = 0; 2374 data->to_addr = to_addr; 2375 data->autinc_to 2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC 2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); 2378 2379 data->explicit_inc_to = 0; 2380 data->reverse 2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); 2382 if (data->reverse) 2383 data->offset = data->len; 2384 2385 /* If storing requires more than two move insns, 2386 copy addresses to registers (to make displacements shorter) 2387 and use post-increment if available. 
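   For instance (illustrative): clearing 16 bytes with 4-byte moves takes
   four insns, so the destination address is copied into a register once
   and, when the target has post-increment (or pre-decrement for reverse
   copies), bumped by explicit add insns around each store; see
   store_by_pieces_2 below.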
*/ 2388 if (!data->autinc_to 2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2) 2390 { 2391 /* Determine the main mode we'll be using. */ 2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2394 if (GET_MODE_SIZE (tmode) < max_size) 2395 mode = tmode; 2396 2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) 2398 { 2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); 2400 data->autinc_to = 1; 2401 data->explicit_inc_to = -1; 2402 } 2403 2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse 2405 && ! data->autinc_to) 2406 { 2407 data->to_addr = copy_addr_to_reg (to_addr); 2408 data->autinc_to = 1; 2409 data->explicit_inc_to = 1; 2410 } 2411 2412 if ( !data->autinc_to && CONSTANT_P (to_addr)) 2413 data->to_addr = copy_addr_to_reg (to_addr); 2414 } 2415 2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); 2417 if (align >= GET_MODE_ALIGNMENT (tmode)) 2418 align = GET_MODE_ALIGNMENT (tmode); 2419 else 2420 { 2421 enum machine_mode xmode; 2422 2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; 2424 tmode != VOIDmode; 2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) 2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES 2427 || SLOW_UNALIGNED_ACCESS (tmode, align)) 2428 break; 2429 2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode)); 2431 } 2432 2433 /* First store what we can in the largest integer mode, then go to 2434 successively smaller modes. */ 2435 2436 while (max_size > 1) 2437 { 2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); 2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) 2440 if (GET_MODE_SIZE (tmode) < max_size) 2441 mode = tmode; 2442 2443 if (mode == VOIDmode) 2444 break; 2445 2446 icode = mov_optab->handlers[(int) mode].insn_code; 2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) 2448 store_by_pieces_2 (GEN_FCN (icode), mode, data); 2449 2450 max_size = GET_MODE_SIZE (mode); 2451 } 2452 2453 /* The code above should have handled everything. */ 2454 gcc_assert (!data->len); 2455} 2456 2457/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate 2458 with move instructions for mode MODE. GENFUN is the gen_... function 2459 to make a move insn for that mode. DATA has all the other info. */ 2460 2461static void 2462store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode, 2463 struct store_by_pieces *data) 2464{ 2465 unsigned int size = GET_MODE_SIZE (mode); 2466 rtx to1, cst; 2467 2468 while (data->len >= size) 2469 { 2470 if (data->reverse) 2471 data->offset -= size; 2472 2473 if (data->autinc_to) 2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr, 2475 data->offset); 2476 else 2477 to1 = adjust_address (data->to, mode, data->offset); 2478 2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) 2480 emit_insn (gen_add2_insn (data->to_addr, 2481 GEN_INT (-(HOST_WIDE_INT) size))); 2482 2483 cst = (*data->constfun) (data->constfundata, data->offset, mode); 2484 emit_insn ((*genfun) (to1, cst)); 2485 2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) 2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); 2488 2489 if (! data->reverse) 2490 data->offset += size; 2491 2492 data->len -= size; 2493 } 2494} 2495 2496/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is 2497 its length in bytes. 
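   An illustrative call (the MEM operand is hypothetical):

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   zeros a 32-byte block, choosing clear_by_pieces, a target setmem
   pattern, or a memset libcall, in that order.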
*/ 2498 2499rtx 2500clear_storage (rtx object, rtx size, enum block_op_methods method) 2501{ 2502 enum machine_mode mode = GET_MODE (object); 2503 unsigned int align; 2504 2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL); 2506 2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode, 2508 just move a zero. Otherwise, do this a piece at a time. */ 2509 if (mode != BLKmode 2510 && GET_CODE (size) == CONST_INT 2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode)) 2512 { 2513 rtx zero = CONST0_RTX (mode); 2514 if (zero != NULL) 2515 { 2516 emit_move_insn (object, zero); 2517 return NULL; 2518 } 2519 2520 if (COMPLEX_MODE_P (mode)) 2521 { 2522 zero = CONST0_RTX (GET_MODE_INNER (mode)); 2523 if (zero != NULL) 2524 { 2525 write_complex_part (object, zero, 0); 2526 write_complex_part (object, zero, 1); 2527 return NULL; 2528 } 2529 } 2530 } 2531 2532 if (size == const0_rtx) 2533 return NULL; 2534 2535 align = MEM_ALIGN (object); 2536 2537 if (GET_CODE (size) == CONST_INT 2538 && CLEAR_BY_PIECES_P (INTVAL (size), align)) 2539 clear_by_pieces (object, INTVAL (size), align); 2540 else if (set_storage_via_setmem (object, size, const0_rtx, align)) 2541 ; 2542 else 2543 return clear_storage_via_libcall (object, size, 2544 method == BLOCK_OP_TAILCALL); 2545 2546 return NULL; 2547} 2548 2549/* A subroutine of clear_storage. Expand a call to memset. 2550 Return the return value of memset, 0 otherwise. */ 2551 2552static rtx 2553clear_storage_via_libcall (rtx object, rtx size, bool tailcall) 2554{ 2555 tree call_expr, arg_list, fn, object_tree, size_tree; 2556 enum machine_mode size_mode; 2557 rtx retval; 2558 2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then 2560 place those into new pseudos into a VAR_DECL and use them later. */ 2561 2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0)); 2563 2564 size_mode = TYPE_MODE (sizetype); 2565 size = convert_to_mode (size_mode, size, 1); 2566 size = copy_to_mode_reg (size_mode, size); 2567 2568 /* It is incorrect to use the libcall calling conventions to call 2569 memset in this context. This could be a user call to memset and 2570 the user may wish to examine the return value from memset. For 2571 targets where libcalls and normal calls have different conventions 2572 for returning pointers, we could end up generating incorrect code. */ 2573 2574 object_tree = make_tree (ptr_type_node, object); 2575 size_tree = make_tree (sizetype, size); 2576 2577 fn = clear_storage_libcall_fn (true); 2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); 2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list); 2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list); 2581 2582 /* Now we have to build up the CALL_EXPR itself. */ 2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); 2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), 2585 call_expr, arg_list, NULL_TREE); 2586 CALL_EXPR_TAILCALL (call_expr) = tailcall; 2587 2588 retval = expand_normal (call_expr); 2589 2590 return retval; 2591} 2592 2593/* A subroutine of clear_storage_via_libcall. Create the tree node 2594 for the function we use for block clears. The first time FOR_CALL 2595 is true, we call assemble_external. 
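   Illustratively (the asmspec is hypothetical), a target whose C library
   spells the routine differently can call

     init_block_clear_fn ("__wrapped_memset");

   once at start-up; set_user_assembler_name then makes every block clear
   assemble against that name.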
*/
2596
2597 static GTY(()) tree block_clear_fn;
2598
2599 void
2600 init_block_clear_fn (const char *asmspec)
2601 {
2602   if (!block_clear_fn)
2603     {
2604       tree fn, args;
2605
2606       fn = get_identifier ("memset");
2607       args = build_function_type_list (ptr_type_node, ptr_type_node,
2608                                        integer_type_node, sizetype,
2609                                        NULL_TREE);
2610
2611       fn = build_decl (FUNCTION_DECL, fn, args);
2612       DECL_EXTERNAL (fn) = 1;
2613       TREE_PUBLIC (fn) = 1;
2614       DECL_ARTIFICIAL (fn) = 1;
2615       TREE_NOTHROW (fn) = 1;
2616       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2618
2619       block_clear_fn = fn;
2620     }
2621
2622   if (asmspec)
2623     set_user_assembler_name (block_clear_fn, asmspec);
2624 }
2625
2626 static tree
2627 clear_storage_libcall_fn (int for_call)
2628 {
2629   static bool emitted_extern;
2630
2631   if (!block_clear_fn)
2632     init_block_clear_fn (NULL);
2633
2634   if (for_call && !emitted_extern)
2635     {
2636       emitted_extern = true;
2637       make_decl_rtl (block_clear_fn);
2638       assemble_external (block_clear_fn);
2639     }
2640
2641   return block_clear_fn;
2642 }
2643
2644 /* Expand a setmem pattern; return true if successful.  */
2645
2646 bool
2647 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648 {
2649   /* Try the most limited insn first, because there's no point
2650      including more than one in the machine description unless
2651      the more limited one has some advantage.  */
2652
2653   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654   enum machine_mode mode;
2655
2656   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657        mode = GET_MODE_WIDER_MODE (mode))
2658     {
2659       enum insn_code code = setmem_optab[(int) mode];
2660       insn_operand_predicate_fn pred;
2661
2662       if (code != CODE_FOR_nothing
2663           /* We don't need MODE to be narrower than
2664              BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665              the mode mask, as it is returned by the macro, it will
2666              definitely be less than the actual mode mask.  */
2667           && ((GET_CODE (size) == CONST_INT
2668                && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669                    <= (GET_MODE_MASK (mode) >> 1)))
2670               || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671           && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672               || (*pred) (object, BLKmode))
2673           && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674               || (*pred) (opalign, VOIDmode)))
2675         {
2676           rtx opsize, opchar;
2677           enum machine_mode char_mode;
2678           rtx last = get_last_insn ();
2679           rtx pat;
2680
2681           opsize = convert_to_mode (mode, size, 1);
2682           pred = insn_data[(int) code].operand[1].predicate;
2683           if (pred != 0 && ! (*pred) (opsize, mode))
2684             opsize = copy_to_mode_reg (mode, opsize);
2685
2686           opchar = val;
2687           char_mode = insn_data[(int) code].operand[2].mode;
2688           if (char_mode != VOIDmode)
2689             {
2690               opchar = convert_to_mode (char_mode, opchar, 1);
2691               pred = insn_data[(int) code].operand[2].predicate;
2692               if (pred != 0 && ! (*pred) (opchar, char_mode))
2693                 opchar = copy_to_mode_reg (char_mode, opchar);
2694             }
2695
2696           pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697           if (pat)
2698             {
2699               emit_insn (pat);
2700               return true;
2701             }
2702           else
2703             delete_insns_since (last);
2704         }
2705     }
2706
2707   return false;
2708 }
2709
2710
2711 /* Write to one of the components of the complex value CPLX.  Write VAL to
2712    the real part if IMAG_P is false, and the imaginary part if it's true.
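   For the common CONCAT representation of a complex pseudo this is just
   a move into the selected half; e.g. (illustrative, X a CONCAT of two
   SImode registers)

     write_complex_part (x, const0_rtx, true);

   stores zero into XEXP (x, 1), the imaginary half, exactly as
   clear_storage above does for complex modes.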
*/ 2713 2714static void 2715write_complex_part (rtx cplx, rtx val, bool imag_p) 2716{ 2717 enum machine_mode cmode; 2718 enum machine_mode imode; 2719 unsigned ibitsize; 2720 2721 if (GET_CODE (cplx) == CONCAT) 2722 { 2723 emit_move_insn (XEXP (cplx, imag_p), val); 2724 return; 2725 } 2726 2727 cmode = GET_MODE (cplx); 2728 imode = GET_MODE_INNER (cmode); 2729 ibitsize = GET_MODE_BITSIZE (imode); 2730 2731 /* For MEMs simplify_gen_subreg may generate an invalid new address 2732 because, e.g., the original address is considered mode-dependent 2733 by the target, which restricts simplify_subreg from invoking 2734 adjust_address_nv. Instead of preparing fallback support for an 2735 invalid address, we call adjust_address_nv directly. */ 2736 if (MEM_P (cplx)) 2737 { 2738 emit_move_insn (adjust_address_nv (cplx, imode, 2739 imag_p ? GET_MODE_SIZE (imode) : 0), 2740 val); 2741 return; 2742 } 2743 2744 /* If the sub-object is at least word sized, then we know that subregging 2745 will work. This special case is important, since store_bit_field 2746 wants to operate on integer modes, and there's rarely an OImode to 2747 correspond to TCmode. */ 2748 if (ibitsize >= BITS_PER_WORD 2749 /* For hard regs we have exact predicates. Assume we can split 2750 the original object if it spans an even number of hard regs. 2751 This special case is important for SCmode on 64-bit platforms 2752 where the natural size of floating-point regs is 32-bit. */ 2753 || (REG_P (cplx) 2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER 2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) 2756 { 2757 rtx part = simplify_gen_subreg (imode, cplx, cmode, 2758 imag_p ? GET_MODE_SIZE (imode) : 0); 2759 if (part) 2760 { 2761 emit_move_insn (part, val); 2762 return; 2763 } 2764 else 2765 /* simplify_gen_subreg may fail for sub-word MEMs. */ 2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); 2767 } 2768 2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val); 2770} 2771 2772/* Extract one of the components of the complex value CPLX. Extract the 2773 real part if IMAG_P is false, and the imaginary part if it's true. */ 2774 2775static rtx 2776read_complex_part (rtx cplx, bool imag_p) 2777{ 2778 enum machine_mode cmode, imode; 2779 unsigned ibitsize; 2780 2781 if (GET_CODE (cplx) == CONCAT) 2782 return XEXP (cplx, imag_p); 2783 2784 cmode = GET_MODE (cplx); 2785 imode = GET_MODE_INNER (cmode); 2786 ibitsize = GET_MODE_BITSIZE (imode); 2787 2788 /* Special case reads from complex constants that got spilled to memory. */ 2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF) 2790 { 2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0)); 2792 if (decl && TREE_CODE (decl) == COMPLEX_CST) 2793 { 2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl); 2795 if (CONSTANT_CLASS_P (part)) 2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL); 2797 } 2798 } 2799 2800 /* For MEMs simplify_gen_subreg may generate an invalid new address 2801 because, e.g., the original address is considered mode-dependent 2802 by the target, which restricts simplify_subreg from invoking 2803 adjust_address_nv. Instead of preparing fallback support for an 2804 invalid address, we call adjust_address_nv directly. */ 2805 if (MEM_P (cplx)) 2806 return adjust_address_nv (cplx, imode, 2807 imag_p ? GET_MODE_SIZE (imode) : 0); 2808 2809 /* If the sub-object is at least word sized, then we know that subregging 2810 will work. 
This special case is important, since extract_bit_field 2811 wants to operate on integer modes, and there's rarely an OImode to 2812 correspond to TCmode. */ 2813 if (ibitsize >= BITS_PER_WORD 2814 /* For hard regs we have exact predicates. Assume we can split 2815 the original object if it spans an even number of hard regs. 2816 This special case is important for SCmode on 64-bit platforms 2817 where the natural size of floating-point regs is 32-bit. */ 2818 || (REG_P (cplx) 2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER 2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) 2821 { 2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode, 2823 imag_p ? GET_MODE_SIZE (imode) : 0); 2824 if (ret) 2825 return ret; 2826 else 2827 /* simplify_gen_subreg may fail for sub-word MEMs. */ 2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); 2829 } 2830 2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 2832 true, NULL_RTX, imode, imode); 2833} 2834 2835/* A subroutine of emit_move_insn_1. Yet another lowpart generator. 2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be 2837 represented in NEW_MODE. If FORCE is true, this will never happen, as 2838 we'll force-create a SUBREG if needed. */ 2839 2840static rtx 2841emit_move_change_mode (enum machine_mode new_mode, 2842 enum machine_mode old_mode, rtx x, bool force) 2843{ 2844 rtx ret; 2845 2846 if (MEM_P (x)) 2847 { 2848 /* We don't have to worry about changing the address since the 2849 size in bytes is supposed to be the same. */ 2850 if (reload_in_progress) 2851 { 2852 /* Copy the MEM to change the mode and move any 2853 substitutions from the old MEM to the new one. */ 2854 ret = adjust_address_nv (x, new_mode, 0); 2855 copy_replacements (x, ret); 2856 } 2857 else 2858 ret = adjust_address (x, new_mode, 0); 2859 } 2860 else 2861 { 2862 /* Note that we do want simplify_subreg's behavior of validating 2863 that the new mode is ok for a hard register. If we were to use 2864 simplify_gen_subreg, we would create the subreg, but would 2865 probably run into the target not being able to implement it. */ 2866 /* Except, of course, when FORCE is true, when this is exactly what 2867 we want. Which is needed for CCmodes on some targets. */ 2868 if (force) 2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0); 2870 else 2871 ret = simplify_subreg (new_mode, x, old_mode, 0); 2872 } 2873 2874 return ret; 2875} 2876 2877/* A subroutine of emit_move_insn_1. Generate a move from Y into X using 2878 an integer mode of the same size as MODE. Returns the instruction 2879 emitted, or NULL if such a move could not be generated. */ 2880 2881static rtx 2882emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force) 2883{ 2884 enum machine_mode imode; 2885 enum insn_code code; 2886 2887 /* There must exist a mode of the exact size we require. */ 2888 imode = int_mode_for_mode (mode); 2889 if (imode == BLKmode) 2890 return NULL_RTX; 2891 2892 /* The target must support moves in this mode. */ 2893 code = mov_optab->handlers[imode].insn_code; 2894 if (code == CODE_FOR_nothing) 2895 return NULL_RTX; 2896 2897 x = emit_move_change_mode (imode, mode, x, force); 2898 if (x == NULL_RTX) 2899 return NULL_RTX; 2900 y = emit_move_change_mode (imode, mode, y, force); 2901 if (y == NULL_RTX) 2902 return NULL_RTX; 2903 return emit_insn (GEN_FCN (code) (x, y)); 2904} 2905 2906/* A subroutine of emit_move_insn_1. X is a push_operand in MODE. 2907 Return an equivalent MEM that does not use an auto-increment. 
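   Illustratively, on a hypothetical stack-grows-downward target with
   4-byte words, the push destination

     (mem:SI (pre_dec (reg sp)))

   becomes an explicit sp = sp - 4 followed by (mem:SI (reg sp)); for the
   POST_ forms the returned MEM instead addresses the stack pointer offset
   back by the adjustment, since the store was meant to land at the
   pre-adjustment address.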
*/ 2908 2909static rtx 2910emit_move_resolve_push (enum machine_mode mode, rtx x) 2911{ 2912 enum rtx_code code = GET_CODE (XEXP (x, 0)); 2913 HOST_WIDE_INT adjust; 2914 rtx temp; 2915 2916 adjust = GET_MODE_SIZE (mode); 2917#ifdef PUSH_ROUNDING 2918 adjust = PUSH_ROUNDING (adjust); 2919#endif 2920 if (code == PRE_DEC || code == POST_DEC) 2921 adjust = -adjust; 2922 else if (code == PRE_MODIFY || code == POST_MODIFY) 2923 { 2924 rtx expr = XEXP (XEXP (x, 0), 1); 2925 HOST_WIDE_INT val; 2926 2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS); 2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT); 2929 val = INTVAL (XEXP (expr, 1)); 2930 if (GET_CODE (expr) == MINUS) 2931 val = -val; 2932 gcc_assert (adjust == val || adjust == -val); 2933 adjust = val; 2934 } 2935 2936 /* Do not use anti_adjust_stack, since we don't want to update 2937 stack_pointer_delta. */ 2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, 2939 GEN_INT (adjust), stack_pointer_rtx, 2940 0, OPTAB_LIB_WIDEN); 2941 if (temp != stack_pointer_rtx) 2942 emit_move_insn (stack_pointer_rtx, temp); 2943 2944 switch (code) 2945 { 2946 case PRE_INC: 2947 case PRE_DEC: 2948 case PRE_MODIFY: 2949 temp = stack_pointer_rtx; 2950 break; 2951 case POST_INC: 2952 case POST_DEC: 2953 case POST_MODIFY: 2954 temp = plus_constant (stack_pointer_rtx, -adjust); 2955 break; 2956 default: 2957 gcc_unreachable (); 2958 } 2959 2960 return replace_equiv_address (x, temp); 2961} 2962 2963/* A subroutine of emit_move_complex. Generate a move from Y into X. 2964 X is known to satisfy push_operand, and MODE is known to be complex. 2965 Returns the last instruction emitted. */ 2966 2967static rtx 2968emit_move_complex_push (enum machine_mode mode, rtx x, rtx y) 2969{ 2970 enum machine_mode submode = GET_MODE_INNER (mode); 2971 bool imag_first; 2972 2973#ifdef PUSH_ROUNDING 2974 unsigned int submodesize = GET_MODE_SIZE (submode); 2975 2976 /* In case we output to the stack, but the size is smaller than the 2977 machine can push exactly, we need to use move instructions. */ 2978 if (PUSH_ROUNDING (submodesize) != submodesize) 2979 { 2980 x = emit_move_resolve_push (mode, x); 2981 return emit_move_insn (x, y); 2982 } 2983#endif 2984 2985 /* Note that the real part always precedes the imag part in memory 2986 regardless of machine's endianness. */ 2987 switch (GET_CODE (XEXP (x, 0))) 2988 { 2989 case PRE_DEC: 2990 case POST_DEC: 2991 imag_first = true; 2992 break; 2993 case PRE_INC: 2994 case POST_INC: 2995 imag_first = false; 2996 break; 2997 default: 2998 gcc_unreachable (); 2999 } 3000 3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), 3002 read_complex_part (y, imag_first)); 3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), 3004 read_complex_part (y, !imag_first)); 3005} 3006 3007/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3008 MODE is known to be complex. Returns the last instruction emitted. */ 3009 3010static rtx 3011emit_move_complex (enum machine_mode mode, rtx x, rtx y) 3012{ 3013 bool try_int; 3014 3015 /* Need to take special care for pushes, to maintain proper ordering 3016 of the data, and possibly extra padding. */ 3017 if (push_operand (x, mode)) 3018 return emit_move_complex_push (mode, x, y); 3019 3020 /* See if we can coerce the target into moving both values at once. */ 3021 3022 /* Move floating point as parts. 
*/ 3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT 3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing) 3025 try_int = false; 3026 /* Not possible if the values are inherently not adjacent. */ 3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT) 3028 try_int = false; 3029 /* Is possible if both are registers (or subregs of registers). */ 3030 else if (register_operand (x, mode) && register_operand (y, mode)) 3031 try_int = true; 3032 /* If one of the operands is a memory, and alignment constraints 3033 are friendly enough, we may be able to do combined memory operations. 3034 We do not attempt this if Y is a constant because that combination is 3035 usually better with the by-parts thing below. */ 3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y)) 3037 && (!STRICT_ALIGNMENT 3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT)) 3039 try_int = true; 3040 else 3041 try_int = false; 3042 3043 if (try_int) 3044 { 3045 rtx ret; 3046 3047 /* For memory to memory moves, optimal behavior can be had with the 3048 existing block move logic. */ 3049 if (MEM_P (x) && MEM_P (y)) 3050 { 3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)), 3052 BLOCK_OP_NO_LIBCALL); 3053 return get_last_insn (); 3054 } 3055 3056 ret = emit_move_via_integer (mode, x, y, true); 3057 if (ret) 3058 return ret; 3059 } 3060 3061 /* Show the output dies here. This is necessary for SUBREGs 3062 of pseudos since we cannot track their lifetimes correctly; 3063 hard regs shouldn't appear here except as return values. */ 3064 if (!reload_completed && !reload_in_progress 3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y)) 3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); 3067 3068 write_complex_part (x, read_complex_part (y, false), false); 3069 write_complex_part (x, read_complex_part (y, true), true); 3070 return get_last_insn (); 3071} 3072 3073/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */ 3075 3076static rtx 3077emit_move_ccmode (enum machine_mode mode, rtx x, rtx y) 3078{ 3079 rtx ret; 3080 3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */ 3082 if (mode != CCmode) 3083 { 3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code; 3085 if (code != CODE_FOR_nothing) 3086 { 3087 x = emit_move_change_mode (CCmode, mode, x, true); 3088 y = emit_move_change_mode (CCmode, mode, y, true); 3089 return emit_insn (GEN_FCN (code) (x, y)); 3090 } 3091 } 3092 3093 /* Otherwise, find the MODE_INT mode of the same width. */ 3094 ret = emit_move_via_integer (mode, x, y, false); 3095 gcc_assert (ret != NULL); 3096 return ret; 3097} 3098 3099/* Return true if word I of OP lies entirely in the 3100 undefined bits of a paradoxical subreg. */ 3101 3102static bool 3103undefined_operand_subword_p (rtx op, int i) 3104{ 3105 enum machine_mode innermode, innermostmode; 3106 int offset; 3107 if (GET_CODE (op) != SUBREG) 3108 return false; 3109 innermode = GET_MODE (op); 3110 innermostmode = GET_MODE (SUBREG_REG (op)); 3111 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op); 3112 /* The SUBREG_BYTE represents offset, as if the value were stored in 3113 memory, except for a paradoxical subreg where we define 3114 SUBREG_BYTE to be 0; undo this exception as in 3115 simplify_subreg. 
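   Concretely (illustrative, little-endian, 4-byte words): in
   (subreg:DI (reg:SI r) 0) only word 0 carries defined bits, so this
   predicate holds for word 1, and emit_move_multi_word below simply
   skips the corresponding move.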
*/ 3116 if (SUBREG_BYTE (op) == 0 3117 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode)) 3118 { 3119 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode)); 3120 if (WORDS_BIG_ENDIAN) 3121 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 3122 if (BYTES_BIG_ENDIAN) 3123 offset += difference % UNITS_PER_WORD; 3124 } 3125 if (offset >= GET_MODE_SIZE (innermostmode) 3126 || offset <= -GET_MODE_SIZE (word_mode)) 3127 return true; 3128 return false; 3129} 3130 3131/* A subroutine of emit_move_insn_1. Generate a move from Y into X. 3132 MODE is any multi-word or full-word mode that lacks a move_insn 3133 pattern. Note that you will get better code if you define such 3134 patterns, even if they must turn into multiple assembler instructions. */ 3135 3136static rtx 3137emit_move_multi_word (enum machine_mode mode, rtx x, rtx y) 3138{ 3139 rtx last_insn = 0; 3140 rtx seq, inner; 3141 bool need_clobber; 3142 int i; 3143 3144 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD); 3145 3146 /* If X is a push on the stack, do the push now and replace 3147 X with a reference to the stack pointer. */ 3148 if (push_operand (x, mode)) 3149 x = emit_move_resolve_push (mode, x); 3150 3151 /* If we are in reload, see if either operand is a MEM whose address 3152 is scheduled for replacement. */ 3153 if (reload_in_progress && MEM_P (x) 3154 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) 3155 x = replace_equiv_address_nv (x, inner); 3156 if (reload_in_progress && MEM_P (y) 3157 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) 3158 y = replace_equiv_address_nv (y, inner); 3159 3160 start_sequence (); 3161 3162 need_clobber = false; 3163 for (i = 0; 3164 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; 3165 i++) 3166 { 3167 rtx xpart = operand_subword (x, i, 1, mode); 3168 rtx ypart; 3169 3170 /* Do not generate code for a move if it would come entirely 3171 from the undefined bits of a paradoxical subreg. */ 3172 if (undefined_operand_subword_p (y, i)) 3173 continue; 3174 3175 ypart = operand_subword (y, i, 1, mode); 3176 3177 /* If we can't get a part of Y, put Y into memory if it is a 3178 constant. Otherwise, force it into a register. Then we must 3179 be able to get a part of Y. */ 3180 if (ypart == 0 && CONSTANT_P (y)) 3181 { 3182 y = use_anchored_address (force_const_mem (mode, y)); 3183 ypart = operand_subword (y, i, 1, mode); 3184 } 3185 else if (ypart == 0) 3186 ypart = operand_subword_force (y, i, mode); 3187 3188 gcc_assert (xpart && ypart); 3189 3190 need_clobber |= (GET_CODE (xpart) == SUBREG); 3191 3192 last_insn = emit_move_insn (xpart, ypart); 3193 } 3194 3195 seq = get_insns (); 3196 end_sequence (); 3197 3198 /* Show the output dies here. This is necessary for SUBREGs 3199 of pseudos since we cannot track their lifetimes correctly; 3200 hard regs shouldn't appear here except as return values. 3201 We never want to emit such a clobber after reload. */ 3202 if (x != y 3203 && ! (reload_in_progress || reload_completed) 3204 && need_clobber != 0) 3205 emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); 3206 3207 emit_insn (seq); 3208 3209 return last_insn; 3210} 3211 3212/* Low level part of emit_move_insn. 3213 Called just like emit_move_insn, but assumes X and Y 3214 are basically valid. 
*/ 3215 3216rtx 3217emit_move_insn_1 (rtx x, rtx y) 3218{ 3219 enum machine_mode mode = GET_MODE (x); 3220 enum insn_code code; 3221 3222 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE); 3223 3224 code = mov_optab->handlers[mode].insn_code; 3225 if (code != CODE_FOR_nothing) 3226 return emit_insn (GEN_FCN (code) (x, y)); 3227 3228 /* Expand complex moves by moving real part and imag part. */ 3229 if (COMPLEX_MODE_P (mode)) 3230 return emit_move_complex (mode, x, y); 3231 3232 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT) 3233 { 3234 rtx result = emit_move_via_integer (mode, x, y, true); 3235 3236 /* If we can't find an integer mode, use multi words. */ 3237 if (result) 3238 return result; 3239 else 3240 return emit_move_multi_word (mode, x, y); 3241 } 3242 3243 if (GET_MODE_CLASS (mode) == MODE_CC) 3244 return emit_move_ccmode (mode, x, y); 3245 3246 /* Try using a move pattern for the corresponding integer mode. This is 3247 only safe when simplify_subreg can convert MODE constants into integer 3248 constants. At present, it can only do this reliably if the value 3249 fits within a HOST_WIDE_INT. */ 3250 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) 3251 { 3252 rtx ret = emit_move_via_integer (mode, x, y, false); 3253 if (ret) 3254 return ret; 3255 } 3256 3257 return emit_move_multi_word (mode, x, y); 3258} 3259 3260/* Generate code to copy Y into X. 3261 Both Y and X must have the same mode, except that 3262 Y can be a constant with VOIDmode. 3263 This mode cannot be BLKmode; use emit_block_move for that. 3264 3265 Return the last instruction emitted. */ 3266 3267rtx 3268emit_move_insn (rtx x, rtx y) 3269{ 3270 enum machine_mode mode = GET_MODE (x); 3271 rtx y_cst = NULL_RTX; 3272 rtx last_insn, set; 3273 3274 gcc_assert (mode != BLKmode 3275 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode)); 3276 3277 if (CONSTANT_P (y)) 3278 { 3279 if (optimize 3280 && SCALAR_FLOAT_MODE_P (GET_MODE (x)) 3281 && (last_insn = compress_float_constant (x, y))) 3282 return last_insn; 3283 3284 y_cst = y; 3285 3286 if (!LEGITIMATE_CONSTANT_P (y)) 3287 { 3288 y = force_const_mem (mode, y); 3289 3290 /* If the target's cannot_force_const_mem prevented the spill, 3291 assume that the target's move expanders will also take care 3292 of the non-legitimate constant. */ 3293 if (!y) 3294 y = y_cst; 3295 else 3296 y = use_anchored_address (y); 3297 } 3298 } 3299 3300 /* If X or Y are memory references, verify that their addresses are valid 3301 for the machine. */ 3302 if (MEM_P (x) 3303 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) 3304 && ! push_operand (x, GET_MODE (x))) 3305 || (flag_force_addr 3306 && CONSTANT_ADDRESS_P (XEXP (x, 0))))) 3307 x = validize_mem (x); 3308 3309 if (MEM_P (y) 3310 && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) 3311 || (flag_force_addr 3312 && CONSTANT_ADDRESS_P (XEXP (y, 0))))) 3313 y = validize_mem (y); 3314 3315 gcc_assert (mode != BLKmode); 3316 3317 last_insn = emit_move_insn_1 (x, y); 3318 3319 if (y_cst && REG_P (x) 3320 && (set = single_set (last_insn)) != NULL_RTX 3321 && SET_DEST (set) == x 3322 && ! rtx_equal_p (y_cst, SET_SRC (set))) 3323 set_unique_reg_note (last_insn, REG_EQUAL, y_cst); 3324 3325 return last_insn; 3326} 3327 3328/* If Y is representable exactly in a narrower mode, and the target can 3329 perform the extension directly from constant or memory, then emit the 3330 move as an extension. 
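   For example (illustrative): if 1.0 is moved into a DFmode register and
   truncates exactly to SFmode, the constant pool gets the narrower SFmode
   entry and the move comes out roughly as

     (set (reg:SF t) (mem:SF (symbol_ref pool-entry)))
     (set (reg:DF d) (float_extend:DF (reg:SF t)))

   with the intermediate pseudo kept for CSE's benefit, as the code below
   notes.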
*/ 3331 3332static rtx 3333compress_float_constant (rtx x, rtx y) 3334{ 3335 enum machine_mode dstmode = GET_MODE (x); 3336 enum machine_mode orig_srcmode = GET_MODE (y); 3337 enum machine_mode srcmode; 3338 REAL_VALUE_TYPE r; 3339 int oldcost, newcost; 3340 3341 REAL_VALUE_FROM_CONST_DOUBLE (r, y); 3342 3343 if (LEGITIMATE_CONSTANT_P (y)) 3344 oldcost = rtx_cost (y, SET); 3345 else 3346 oldcost = rtx_cost (force_const_mem (dstmode, y), SET); 3347 3348 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); 3349 srcmode != orig_srcmode; 3350 srcmode = GET_MODE_WIDER_MODE (srcmode)) 3351 { 3352 enum insn_code ic; 3353 rtx trunc_y, last_insn; 3354 3355 /* Skip if the target can't extend this way. */ 3356 ic = can_extend_p (dstmode, srcmode, 0); 3357 if (ic == CODE_FOR_nothing) 3358 continue; 3359 3360 /* Skip if the narrowed value isn't exact. */ 3361 if (! exact_real_truncate (srcmode, &r)) 3362 continue; 3363 3364 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode); 3365 3366 if (LEGITIMATE_CONSTANT_P (trunc_y)) 3367 { 3368 /* Skip if the target needs extra instructions to perform 3369 the extension. */ 3370 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) 3371 continue; 3372 /* This is valid, but may not be cheaper than the original. */ 3373 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); 3374 if (oldcost < newcost) 3375 continue; 3376 } 3377 else if (float_extend_from_mem[dstmode][srcmode]) 3378 { 3379 trunc_y = force_const_mem (srcmode, trunc_y); 3380 /* This is valid, but may not be cheaper than the original. */ 3381 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); 3382 if (oldcost < newcost) 3383 continue; 3384 trunc_y = validize_mem (trunc_y); 3385 } 3386 else 3387 continue; 3388 3389 /* For CSE's benefit, force the compressed constant pool entry 3390 into a new pseudo. This constant may be used in different modes, 3391 and if not, combine will put things back together for us. */ 3392 trunc_y = force_reg (srcmode, trunc_y); 3393 emit_unop_insn (ic, x, trunc_y, UNKNOWN); 3394 last_insn = get_last_insn (); 3395 3396 if (REG_P (x)) 3397 set_unique_reg_note (last_insn, REG_EQUAL, y); 3398 3399 return last_insn; 3400 } 3401 3402 return NULL_RTX; 3403} 3404 3405/* Pushing data onto the stack. */ 3406 3407/* Push a block of length SIZE (perhaps variable) 3408 and return an rtx to address the beginning of the block. 3409 The value may be virtual_outgoing_args_rtx. 3410 3411 EXTRA is the number of bytes of padding to push in addition to SIZE. 3412 BELOW nonzero means this padding comes at low addresses; 3413 otherwise, the padding comes at high addresses. */ 3414 3415rtx 3416push_block (rtx size, int extra, int below) 3417{ 3418 rtx temp; 3419 3420 size = convert_modes (Pmode, ptr_mode, size, 1); 3421 if (CONSTANT_P (size)) 3422 anti_adjust_stack (plus_constant (size, extra)); 3423 else if (REG_P (size) && extra == 0) 3424 anti_adjust_stack (size); 3425 else 3426 { 3427 temp = copy_to_mode_reg (Pmode, size); 3428 if (extra != 0) 3429 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), 3430 temp, 0, OPTAB_LIB_WIDEN); 3431 anti_adjust_stack (temp); 3432 } 3433 3434#ifndef STACK_GROWS_DOWNWARD 3435 if (0) 3436#else 3437 if (1) 3438#endif 3439 { 3440 temp = virtual_outgoing_args_rtx; 3441 if (extra != 0 && below) 3442 temp = plus_constant (temp, extra); 3443 } 3444 else 3445 { 3446 if (GET_CODE (size) == CONST_INT) 3447 temp = plus_constant (virtual_outgoing_args_rtx, 3448 -INTVAL (size) - (below ? 
0 : extra)); 3449 else if (extra != 0 && !below) 3450 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, 3451 negate_rtx (Pmode, plus_constant (size, extra))); 3452 else 3453 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, 3454 negate_rtx (Pmode, size)); 3455 } 3456 3457 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); 3458} 3459 3460#ifdef PUSH_ROUNDING 3461 3462/* Emit single push insn. */ 3463 3464static void 3465emit_single_push_insn (enum machine_mode mode, rtx x, tree type) 3466{ 3467 rtx dest_addr; 3468 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); 3469 rtx dest; 3470 enum insn_code icode; 3471 insn_operand_predicate_fn pred; 3472 3473 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); 3474 /* If there is push pattern, use it. Otherwise try old way of throwing 3475 MEM representing push operation to move expander. */ 3476 icode = push_optab->handlers[(int) mode].insn_code; 3477 if (icode != CODE_FOR_nothing) 3478 { 3479 if (((pred = insn_data[(int) icode].operand[0].predicate) 3480 && !((*pred) (x, mode)))) 3481 x = force_reg (mode, x); 3482 emit_insn (GEN_FCN (icode) (x)); 3483 return; 3484 } 3485 if (GET_MODE_SIZE (mode) == rounded_size) 3486 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); 3487 /* If we are to pad downward, adjust the stack pointer first and 3488 then store X into the stack location using an offset. This is 3489 because emit_move_insn does not know how to pad; it does not have 3490 access to type. */ 3491 else if (FUNCTION_ARG_PADDING (mode, type) == downward) 3492 { 3493 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode); 3494 HOST_WIDE_INT offset; 3495 3496 emit_move_insn (stack_pointer_rtx, 3497 expand_binop (Pmode, 3498#ifdef STACK_GROWS_DOWNWARD 3499 sub_optab, 3500#else 3501 add_optab, 3502#endif 3503 stack_pointer_rtx, 3504 GEN_INT (rounded_size), 3505 NULL_RTX, 0, OPTAB_LIB_WIDEN)); 3506 3507 offset = (HOST_WIDE_INT) padding_size; 3508#ifdef STACK_GROWS_DOWNWARD 3509 if (STACK_PUSH_CODE == POST_DEC) 3510 /* We have already decremented the stack pointer, so get the 3511 previous value. */ 3512 offset += (HOST_WIDE_INT) rounded_size; 3513#else 3514 if (STACK_PUSH_CODE == POST_INC) 3515 /* We have already incremented the stack pointer, so get the 3516 previous value. */ 3517 offset -= (HOST_WIDE_INT) rounded_size; 3518#endif 3519 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset)); 3520 } 3521 else 3522 { 3523#ifdef STACK_GROWS_DOWNWARD 3524 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ 3525 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, 3526 GEN_INT (-(HOST_WIDE_INT) rounded_size)); 3527#else 3528 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ 3529 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, 3530 GEN_INT (rounded_size)); 3531#endif 3532 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); 3533 } 3534 3535 dest = gen_rtx_MEM (mode, dest_addr); 3536 3537 if (type != 0) 3538 { 3539 set_mem_attributes (dest, type, 1); 3540 3541 if (flag_optimize_sibling_calls) 3542 /* Function incoming arguments may overlap with sibling call 3543 outgoing arguments and we cannot allow reordering of reads 3544 from function arguments with stores to outgoing arguments 3545 of sibling calls. */ 3546 set_mem_alias_set (dest, 0); 3547 } 3548 emit_move_insn (dest, x); 3549} 3550#endif 3551 3552/* Generate code to push X onto the stack, assuming it has mode MODE and 3553 type TYPE. 
3554    MODE is redundant except when X is a CONST_INT (since they don't
3555    carry mode info).
3556    SIZE is an rtx for the size of data to be copied (in bytes),
3557    needed only if X is BLKmode.
3558
3559    ALIGN (in bits) is maximum alignment we can assume.
3560
3561    If PARTIAL and REG are both nonzero, then copy that many of the first
3562    bytes of X into registers starting with REG, and push the rest of X.
3563    The amount of space pushed is decreased by PARTIAL bytes.
3564    REG must be a hard register in this case.
3565    If REG is zero but PARTIAL is not, take all other actions for an
3566    argument partially in registers, but do not actually load any
3567    registers.
3568
3569    EXTRA is the amount in bytes of extra space to leave next to this arg.
3570    This is ignored if an argument block has already been allocated.
3571
3572    On a machine that lacks real push insns, ARGS_ADDR is the address of
3573    the bottom of the argument block for this call.  We use indexing off there
3574    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3575    argument block has not been preallocated.
3576
3577    ARGS_SO_FAR is the size of args previously pushed for this call.
3578
3579    REG_PARM_STACK_SPACE is nonzero if functions require stack space
3580    for arguments passed in registers.  If nonzero, it will be the number
3581    of bytes required.  */
3582
3583 void
3584 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3585                 unsigned int align, int partial, rtx reg, int extra,
3586                 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3587                 rtx alignment_pad)
3588 {
3589   rtx xinner;
3590   enum direction stack_direction
3591 #ifdef STACK_GROWS_DOWNWARD
3592     = downward;
3593 #else
3594     = upward;
3595 #endif
3596
3597   /* Decide where to pad the argument:  `downward' for below,
3598      `upward' for above, or `none' for don't pad it.
3599      Default is below for small data on big-endian machines; else above.  */
3600   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3601
3602   /* Invert direction if stack is post-decrement.
3603      FIXME: why?  */
3604   if (STACK_PUSH_CODE == POST_DEC)
3605     if (where_pad != none)
3606       where_pad = (where_pad == downward ? upward : downward);
3607
3608   xinner = x;
3609
3610   if (mode == BLKmode)
3611     {
3612       /* Copy a block into the stack, entirely or partially.  */
3613
3614       rtx temp;
3615       int used;
3616       int offset;
3617       int skip;
3618
3619       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3620       used = partial - offset;
3621
3622       gcc_assert (size);
3623
3624       /* USED is now the # of bytes we need not copy to the stack
3625          because registers will take care of them.  */
3626
3627       if (partial != 0)
3628         xinner = adjust_address (xinner, BLKmode, used);
3629
3630       /* If the partial register-part of the arg counts in its stack size,
3631          skip the part of stack space corresponding to the registers.
3632          Otherwise, start copying to the beginning of the stack space,
3633          by setting SKIP to 0.  */
3634       skip = (reg_parm_stack_space == 0) ? 0 : used;
3635
3636 #ifdef PUSH_ROUNDING
3637       /* Do it with several push insns if that doesn't take lots of insns
3638          and if there is no difficulty with push insns that skip bytes
3639          on the stack for alignment purposes.
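         For instance (illustrative): a 12-byte, word-aligned aggregate
         on a target with 4-byte push insns goes out as three pushes
         through move_by_pieces, provided PUSH_ROUNDING does not round
         the pieces up and leave alignment holes, which is what the
         conditions below check.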
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct bytes put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}

/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
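/* Illustrative note on get_subtarget, with P standing for a hypothetical
   pseudo register and H for a hard register (placeholders, not identifiers
   used in this file):

     get_subtarget (NULL_RTX) yields 0       no candidate at all
     get_subtarget (H)        yields 0       don't extend a hard reg's life
     get_subtarget (P)        yields 0       when optimizing
     get_subtarget (P)        yields P       only when not optimizing

   i.e. X is reused as an intermediate target only when it is a pseudo
   register and we are not optimizing.  */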
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, 0);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
      value = convert_modes (GET_MODE (str_rtx),
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
	{
	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
			      - 1);
	  value = expand_and (GET_MODE (str_rtx), value, mask,
			      NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
			    build_int_cst (NULL_TREE, bitpos),
			    NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
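/* Illustrative sketch of source fragments the optimization above can
   catch, as hypothetical user code (the field layout is an assumption;
   actual bit positions depend on the ABI and on BYTES_BIG_ENDIAN):

     struct S { unsigned f : 1; unsigned g : 7; } s;
     s.f = s.f + 1;     1-bit field: becomes an XOR of the containing word
     s.g = s.g | 3;     becomes an IOR with a masked, shifted constant

   In both cases the containing word is combined with a shifted constant
   in a single read-modify-write instead of extracting, operating on, and
   re-inserting the field.  */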
/* Expand an assignment that stores the value of FROM into TO.  */

void
expand_assignment (tree to, tree from)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
	{
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
	{
	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
	    {
	      gcc_assert (bitpos == 0);
	      result = store_expr (from, to_rtx, false);
	    }
	  else
	    {
	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);

	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

	      /* Deal with volatile and readonly fields.  The former is only
		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	      if (component_uses_parent_alias_set (to))
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
					       to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				  TREE_TYPE (tem), get_alias_set (to));
	}

      if (result)
	preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }
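  /* Illustrative note: the branch above covers stores such as the
     hypothetical

       s.f = v;        component of a struct
       a[i] = v;       array element, possibly at a variable offset
       p->bits = v;    bit-field store

     where get_inner_reference has decomposed the lvalue into a base
     object, a constant bit position, and an optional variable offset,
     and the store is finished by optimize_bitfield_assignment_op or
     store_field.  */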
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG
     since it might be a promoted variable where the zero- or sign-extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
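/* Illustrative example of the CALL_EXPR ordering handled in
   expand_assignment above, as hypothetical user code:

     jmp_buf buf;
     int val;
     ...
     val = setjmp (buf);

   On a machine where referencing VAL requires loading part of an address
   in a separate insn, that load must not happen before the call; expanding
   the call first and only then computing the lvalue keeps this correct.  */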
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int call_param_p)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && (!lang_hooks.reduce_bit_field_operations
	      || (GET_MODE_PRECISION (GET_MODE (target))
		  == TYPE_PRECISION (TREE_TYPE (exp)))))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = fold_convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = fold_convert (lang_hooks.types.type_for_mode
			      (GET_MODE (SUBREG_REG (target)),
			       SUBREG_PROMOTED_UNSIGNED_P (target)),
			      exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not a valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends' (C++)
	 expr_size hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
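/* Illustrative example of the STRING_CST path in store_expr above, as
   hypothetical user code:

     char buf[8] = "hi";

   The string constant supplies TREE_STRING_LENGTH == 3 bytes (including
   the terminating NUL) while the target occupies 8, so the expansion
   copies min (8, 3) = 3 bytes with emit_block_move and clears the
   remaining 5 with clear_storage.  The array size is an assumption chosen
   for the example.  */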
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_elt_count,
			    bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult;

      mult = 1;
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p
	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

	    nz_elts += mult * nz;
	    elt_count += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  elt_count += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  elt_count += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      {
		if (!initializer_zerop (TREE_VALUE (v)))
		  nz_elts += mult;
		elt_count += mult;
	      }
	  }
	  break;

	default:
	  nz_elts += mult;
	  elt_count += mult;

	  if (const_from_elts_p && const_p)
	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
		      != NULL_TREE;
	  break;
	}
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
	{
	  /* We don't expect more than one element of the union to be
	     initialized.  Not sure what we should do otherwise... */
	  gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
		      == 1);

	  init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
						CONSTRUCTOR_ELTS (ctor),
						0)->value);

	  /* ??? We could look at each element of the union, and find the
	     largest element.  Which would avoid comparing the size of the
	     initialized element against any tail padding in the union.
	     Doesn't seem worth the effort...  */
	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
				TYPE_SIZE (init_sub_type)) == 1)
	    {
	      /* And now we have to find out if the element itself is fully
		 constructed.  E.g. for union { struct { int a, b; } s; } u
		 = { .s = { .a = 1 } }.  */
	      if (elt_count == count_type_elements (init_sub_type, false))
		clear_this = false;
	    }
	}

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}

/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place the count in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place the count in *P_ELT_COUNT;
   * if the type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *P_MUST_CLEAR.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_elt_count,
			  bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
}

/* Count the number of scalars in TYPE.  Return -1 on overflow or if
   TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
   flexible array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
	    if (n == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f), false);
	      if (t < 0)
		{
		  /* Check for structures with flexible array member.  */
		  tree tf = TREE_TYPE (f);
		  if (allow_flexarr
		      && TREE_CHAIN (f) == NULL
		      && TREE_CODE (tf) == ARRAY_TYPE
		      && TYPE_DOMAIN (tf)
		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
		      && int_size_in_bytes (type) >= 0)
		    break;

		  return -1;
		}
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
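/* Illustrative values for count_type_elements above (hypothetical
   declarations; the union figure assumes 4-byte UNITS_PER_WORD):

     int a[4];                        4
     struct { int x; int y[2]; }      3
     _Complex double                  2
     union { char c; double d; }      sizeof (the union) / UNITS_PER_WORD,
                                      i.e. 2 -- a guess based on words
     int a[n];  (variable length)    -1  */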
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
	return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}

/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
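/* Illustrative sketch of the clear-first strategy used by store_constructor
   below, as hypothetical user code:

     int a[8] = { 0, 0, 7 };

   categorize_ctor_elements reports one nonzero element, and five of the
   eight elements are missing from the constructor entirely, so the whole
   array is cleared once (clear_storage, or a zero move for a register
   target) and only the single nonzero element is stored afterward.  */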
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (! cleared)
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));

		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
		if (GET_MODE (offset_rtx) != Pmode)
		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
		if (GET_MODE (offset_rtx) != ptr_mode)
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_size
		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, type, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    int unsignedp;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    unsignedp = TYPE_UNSIGNED (elttype);
	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, type, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);
		    unsignedp = TYPE_UNSIGNED (domain);

		    index = build_decl (VAR_DECL, NULL_TREE, domain);

		    index_r
		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						   &unsignedp, 0));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node));

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 type, cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = 0;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) vec_init_optab->handlers[mode].insn_code;
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole vector first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype), 0), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos,
					 value_mode, value, type,
					 cleared, get_alias_set (elttype));
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
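/* Illustrative note on the ARRAY_TYPE RANGE_EXPR handling above, using the
   GNU C designated-range initializer extension as hypothetical user code:

     int a[100] = { [2 ... 5] = 7 };

   For a constant range whose total size is small enough, the stores are
   unrolled, one store_constructor_field call per element; otherwise a loop
   is synthesized: an index pseudo is initialized to the low bound, each
   element is stored through offset_address, and a conditional jump closes
   the loop.  */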

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      if (TREE_CODE (exp) == NOP_EXPR)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = TREE_OPERAND (exp, 0);
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && !(bitpos % BITS_PER_UNIT));

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0);
    }
}
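/* Illustrative sketch (not part of the original file): a typical
   store_field use, with hypothetical values.  Storing a 9-bit field at
   bit offset 3 of a struct in memory fails the alignment tests above
   (bitpos % BITS_PER_UNIT != 0), so the value is expanded and written
   via the bit-field path:

     store_field (mem, 9, 3, VOIDmode, exp, type, alias_set);

   whereas a byte-aligned, byte-sized QImode field takes the
   adjust_address / store_expr path at the end of the function.  */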

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  *pmode = mode;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
				     DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   bitsize_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_mul (tree_to_double_int (offset),
				       uhwi_to_double_int (BITS_PER_UNIT));
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
      if (double_int_fits_in_shwi_p (tem))
	{
	  *pbitpos = double_int_to_shwi (tem);
	  *poffset = NULL_TREE;
	  return exp;
	}
    }

  /* Otherwise, split it up.  */
  *pbitpos = tree_low_cst (bit_offset, 0);
  *poffset = offset;

  return exp;
}
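/* Illustrative sketch (not part of the original file): how a caller
   typically decomposes a reference with get_inner_reference.  The
   function name and field layout are hypothetical; compare the
   normal_inner_ref handling in expand_expr_real_1 below.  */
#if 0
static void
get_inner_reference_example (tree exp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* For s.f where F is a 12-bit bit-field at bit 4 of S, this sets
     bitsize = 12, bitpos = 4, offset = NULL_TREE, and mode = VOIDmode
     (bit-field), and returns the node for S itself.  */
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode, &unsignedp, &volatilep, false);
  (void) base;
}
#endif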

/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
   look for whether EXP or any nested component-ref within EXP is marked
   as PACKED.  */

bool
contains_packed_reference (tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    packed_p = DECL_PACKED (field)
		       || TYPE_PACKED (TREE_TYPE (field))
		       || TYPE_PACKED (TREE_TYPE (exp));
	    if (packed_p)
	      goto done;
	  }
	  break;

	case BIT_FIELD_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  goto done;
	}
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}

/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert (sizetype, aligned_size);
      return size_binop (MULT_EXPR, aligned_size,
			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}

/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}

/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}

/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert (sizetype, aligned_offset);
      return size_binop (MULT_EXPR, aligned_offset,
			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
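/* Illustrative note (not in the original source): the accessors above
   rely on the tree operand layout.  For ARRAY_REF, operand 0 is the
   array, 1 the index, 2 an optional lower bound, and 3 an optional
   element size in alignment units of the element type; for
   COMPONENT_REF, operand 0 is the object, 1 the FIELD_DECL, and 2 an
   optional offset in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  When
   operands 2/3 are absent, the values fall back to the type's domain
   and the field's DECL_* information, as the code above shows.  */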

/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}

/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by the loop
     optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	  break;
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	  break;
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	  break;
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
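/* Illustrative sketch (not part of the original file): the kind of
   composite rtx force_operand flattens.  The function name and use of
   Pmode are hypothetical.  */
#if 0
static rtx
force_operand_example (void)
{
  rtx idx = gen_reg_rtx (Pmode);

  /* (plus (mult (reg) (const_int 4)) (const_int 8)) is neither a
     register, memory, nor constant; force_operand emits the multiply
     and the add, returning a pseudo holding the result.  */
  rtx value = gen_rtx_PLUS (Pmode,
			    gen_rtx_MULT (Pmode, idx, GEN_INT (4)),
			    GEN_INT (8));
  return force_operand (value, NULL_RTX);
}
#endif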

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  for (idx = 0;
	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
	       idx++)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;
	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
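/* Illustrative note (not in the original source): the typical use of
   safe_from_p is the one in expand_operands below -- before reusing
   TARGET for the first operand of a binary expression, check that the
   second operand cannot read or write it:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   A zero answer merely forfeits an optimization; it is never wrong.  */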

/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
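/* Illustrative worked example (not in the original source): for the
   expression i * 12 + 8 with I not constant, the MULT_EXPR case yields
   1 * 4 = 4 (4 being the largest power of two dividing 12), the
   INTEGER_CST case yields 8 for the addend, and the PLUS_EXPR case
   returns MIN (4, 8) = 4; so a MEM addressed by this expression may be
   marked as 4-byte aligned.  */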

/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
  else
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
  return MAX (factor, target_align);
}

/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
	/* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
	/* OK.  */;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, 0, 0);
      else
	/* No expansion needed.  */
	gcc_assert (TREE_CODE (var) == TYPE_DECL
		    || TREE_CODE (var) == CONST_DECL
		    || TREE_CODE (var) == FUNCTION_DECL
		    || TREE_CODE (var) == LABEL_DECL);
    }
}

/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}

/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}

/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
		         enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at top level.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      || CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp))
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);

      result = convert_memory_address (tmode, result);
      tmp = convert_memory_address (tmode, tmp);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}

/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);

  /* Despite expand_expr's claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
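/* Illustrative note (not in the original source): for &a->b the
   recursion above proceeds as expand_expr_addr_expr_1 (COMPONENT_REF)
   -> get_inner_reference, which peels the COMPONENT_REF and leaves the
   INDIRECT_REF *a as the base; the INDIRECT_REF case then expands the
   pointer A itself, and the field's byte offset is added back with
   plus_constant.  A "(short) &a" cast by the user is why TMODE is
   forced to Pmode or ptr_mode above.  */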

/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.
     It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						  REG_NOTES (insn));
	    }
	}
    }

  return ret;
}
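/* Illustrative note (not in the original source): under EXPAND_SUM a
   subexpression such as p + 4 may legitimately come back as the bare
   rtx (plus (reg P) (const_int 4)) instead of a pseudo holding the
   sum, leaving it to the caller (typically an address computation) to
   fold the addition into an addressing mode.  With EXPAND_NORMAL the
   same expression would emit an add insn and return the result
   register.  */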
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context, subexp0, subexp1;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }


  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				 NULL);

    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
      gcc_assert (decl_rtl);

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);
      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (REG_P (decl_rtl)
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
				(TREE_CODE (exp) == RESULT_DECL
				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	    if (type_for_mode)
	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
	  }
	if (!tmp)
	  tmp = build_constructor_from_list (type,
					     TREE_VECTOR_CST_ELTS (exp));
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    TREE_OPERAND (exp, 0) = val;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* Try to avoid creating a temporary at all.  This is possible
	 if all of the initializer is zero.
	 FIXME: try to handle all [0..255] initializers we can handle
	 with memset.  */
      else if (TREE_STATIC (exp)
	       && !TREE_ADDRESSABLE (exp)
	       && target != 0 && mode == BLKmode
	       && all_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	  return target;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.
	 If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = expand_expr_constant (exp, 1, modifier);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}

    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (code == ALIGN_INDIRECT_REF)
	  {
	    int align = TYPE_ALIGN_UNIT (type);
	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx_MEM (mode, op0);

	set_mem_attributes (temp, exp, 0);

	/* Resolve the misalignment now, so that we don't have to remember
	   to resolve it later.  Of course, this only works for reads.  */
	/* ??? When we get around to supporting writes, we'll have to handle
	   this in store_expr directly.  The vectorizer isn't generating
	   those yet, however.  */
	if (code == MISALIGNED_INDIRECT_REF)
	  {
	    int icode;
	    rtx reg, insn;

	    gcc_assert (modifier == EXPAND_NORMAL
			|| modifier == EXPAND_STACK_PARM);

	    /* The vectorizer should have already checked the mode.  */
	    icode = movmisalign_optab->handlers[mode].insn_code;
	    gcc_assert (icode != CODE_FOR_nothing);

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
*/ 7246 insn = GEN_FCN (icode) (reg, temp); 7247 emit_insn (insn); 7248 7249 return reg; 7250 } 7251 7252 return temp; 7253 } 7254 7255 case TARGET_MEM_REF: 7256 { 7257 struct mem_address addr; 7258 7259 get_address_description (exp, &addr); 7260 op0 = addr_for_mem_ref (&addr, true); 7261 op0 = memory_address (mode, op0); 7262 temp = gen_rtx_MEM (mode, op0); 7263 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0); 7264 } 7265 return temp; 7266 7267 case ARRAY_REF: 7268 7269 { 7270 tree array = TREE_OPERAND (exp, 0); 7271 tree index = TREE_OPERAND (exp, 1); 7272 7273 /* Fold an expression like: "foo"[2]. 7274 This is not done in fold so it won't happen inside &. 7275 Don't fold if this is for wide characters since it's too 7276 difficult to do correctly and this is a very rare case. */ 7277 7278 if (modifier != EXPAND_CONST_ADDRESS 7279 && modifier != EXPAND_INITIALIZER 7280 && modifier != EXPAND_MEMORY) 7281 { 7282 tree t = fold_read_from_constant_string (exp); 7283 7284 if (t) 7285 return expand_expr (t, target, tmode, modifier); 7286 } 7287 7288 /* If this is a constant index into a constant array, 7289 just get the value from the array. Handle both the case where 7290 we have an explicit constructor and the case where our operand is 7291 a variable that was declared const. */ 7292 7293 if (modifier != EXPAND_CONST_ADDRESS 7294 && modifier != EXPAND_INITIALIZER 7295 && modifier != EXPAND_MEMORY 7296 && TREE_CODE (array) == CONSTRUCTOR 7297 && ! TREE_SIDE_EFFECTS (array) 7298 && TREE_CODE (index) == INTEGER_CST) 7299 { 7300 unsigned HOST_WIDE_INT ix; 7301 tree field, value; 7302 7303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix, 7304 field, value) 7305 if (tree_int_cst_equal (field, index)) 7306 { 7307 if (!TREE_SIDE_EFFECTS (value)) 7308 return expand_expr (fold (value), target, tmode, modifier); 7309 break; 7310 } 7311 } 7312 7313 else if (optimize >= 1 7314 && modifier != EXPAND_CONST_ADDRESS 7315 && modifier != EXPAND_INITIALIZER 7316 && modifier != EXPAND_MEMORY 7317 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) 7318 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) 7319 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK 7320 && targetm.binds_local_p (array)) 7321 { 7322 if (TREE_CODE (index) == INTEGER_CST) 7323 { 7324 tree init = DECL_INITIAL (array); 7325 7326 if (TREE_CODE (init) == CONSTRUCTOR) 7327 { 7328 unsigned HOST_WIDE_INT ix; 7329 tree field, value; 7330 7331 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix, 7332 field, value) 7333 if (tree_int_cst_equal (field, index)) 7334 { 7335 if (!TREE_SIDE_EFFECTS (value)) 7336 return expand_expr (fold (value), target, tmode, 7337 modifier); 7338 break; 7339 } 7340 } 7341 else if (TREE_CODE (init) == STRING_CST) 7342 { 7343 tree index1 = index; 7344 tree low_bound = array_ref_low_bound (exp); 7345 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1)); 7346 7347 /* Optimize the special case of a zero lower bound. 7348 7349 We convert low_bound to sizetype to avoid some problems 7350 with constant folding. (E.g. suppose the lower bound is 1, 7351 and its mode is QI. Without the conversion, (ARRAY 7352 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) 7353 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ 7354 7355 if (!
integer_zerop (low_bound)) 7356 index1 = size_diffop (index1, fold_convert (sizetype, 7357 low_bound)); 7358 7359 if (0 > compare_tree_int (index1, 7360 TREE_STRING_LENGTH (init))) 7361 { 7362 tree type = TREE_TYPE (TREE_TYPE (init)); 7363 enum machine_mode mode = TYPE_MODE (type); 7364 7365 if (GET_MODE_CLASS (mode) == MODE_INT 7366 && GET_MODE_SIZE (mode) == 1) 7367 return gen_int_mode (TREE_STRING_POINTER (init) 7368 [TREE_INT_CST_LOW (index1)], 7369 mode); 7370 } 7371 } 7372 } 7373 } 7374 } 7375 goto normal_inner_ref; 7376 7377 case COMPONENT_REF: 7378 /* If the operand is a CONSTRUCTOR, we can just extract the 7379 appropriate field if it is present. */ 7380 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR) 7381 { 7382 unsigned HOST_WIDE_INT idx; 7383 tree field, value; 7384 7385 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), 7386 idx, field, value) 7387 if (field == TREE_OPERAND (exp, 1) 7388 /* We can normally use the value of the field in the 7389 CONSTRUCTOR. However, if this is a bitfield in 7390 an integral mode that we can fit in a HOST_WIDE_INT, 7391 we must mask only the number of bits in the bitfield, 7392 since this is done implicitly by the constructor. If 7393 the bitfield does not meet either of those conditions, 7394 we can't do this optimization. */ 7395 && (! DECL_BIT_FIELD (field) 7396 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT) 7397 && (GET_MODE_BITSIZE (DECL_MODE (field)) 7398 <= HOST_BITS_PER_WIDE_INT)))) 7399 { 7400 if (DECL_BIT_FIELD (field) 7401 && modifier == EXPAND_STACK_PARM) 7402 target = 0; 7403 op0 = expand_expr (value, target, tmode, modifier); 7404 if (DECL_BIT_FIELD (field)) 7405 { 7406 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)); 7407 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field)); 7408 7409 if (TYPE_UNSIGNED (TREE_TYPE (field))) 7410 { 7411 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1); 7412 op0 = expand_and (imode, op0, op1, target); 7413 } 7414 else 7415 { 7416 tree count 7417 = build_int_cst (NULL_TREE, 7418 GET_MODE_BITSIZE (imode) - bitsize); 7419 7420 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, 7421 target, 0); 7422 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, 7423 target, 0); 7424 } 7425 } 7426 7427 return op0; 7428 } 7429 } 7430 goto normal_inner_ref; 7431 7432 case BIT_FIELD_REF: 7433 case ARRAY_RANGE_REF: 7434 normal_inner_ref: 7435 { 7436 enum machine_mode mode1; 7437 HOST_WIDE_INT bitsize, bitpos; 7438 tree offset; 7439 int volatilep = 0; 7440 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, 7441 &mode1, &unsignedp, &volatilep, true); 7442 rtx orig_op0; 7443 7444 /* If we got back the original object, something is wrong. Perhaps 7445 we are evaluating an expression too early. In any event, don't 7446 infinitely recurse. */ 7447 gcc_assert (tem != exp); 7448 7449 /* If TEM's type is a union of variable size, pass TARGET to the inner 7450 computation, since it will need a temporary and TARGET is known 7451 to have to do. This occurs in unchecked conversion in Ada. */ 7452 7453 orig_op0 = op0 7454 = expand_expr (tem, 7455 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE 7456 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) 7457 != INTEGER_CST) 7458 && modifier != EXPAND_STACK_PARM 7459 ? target : NULL_RTX), 7460 VOIDmode, 7461 (modifier == EXPAND_INITIALIZER 7462 || modifier == EXPAND_CONST_ADDRESS 7463 || modifier == EXPAND_STACK_PARM) 7464 ? 
modifier : EXPAND_NORMAL); 7465 7466 /* If this is a constant, put it into a register if it is a legitimate 7467 constant, OFFSET is 0, and we won't try to extract outside the 7468 register (in case we were passed a partially uninitialized object 7469 or a view_conversion to a larger size). Force the constant to 7470 memory otherwise. */ 7471 if (CONSTANT_P (op0)) 7472 { 7473 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); 7474 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) 7475 && offset == 0 7476 && bitpos + bitsize <= GET_MODE_BITSIZE (mode)) 7477 op0 = force_reg (mode, op0); 7478 else 7479 op0 = validize_mem (force_const_mem (mode, op0)); 7480 } 7481 7482 /* Otherwise, if this object is not in memory and we either have an 7483 offset, a BLKmode result, or a reference outside the object, put it 7484 there. Such cases can occur in Ada if we have unchecked conversion 7485 of an expression from a scalar type to an array or record type or 7486 for an ARRAY_RANGE_REF whose type is BLKmode. */ 7487 else if (!MEM_P (op0) 7488 && (offset != 0 7489 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0))) 7490 || (code == ARRAY_RANGE_REF && mode == BLKmode))) 7491 { 7492 tree nt = build_qualified_type (TREE_TYPE (tem), 7493 (TYPE_QUALS (TREE_TYPE (tem)) 7494 | TYPE_QUAL_CONST)); 7495 rtx memloc = assign_temp (nt, 1, 1, 1); 7496 7497 emit_move_insn (memloc, op0); 7498 op0 = memloc; 7499 } 7500 7501 if (offset != 0) 7502 { 7503 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 7504 EXPAND_SUM); 7505 7506 gcc_assert (MEM_P (op0)); 7507 7508#ifdef POINTERS_EXTEND_UNSIGNED 7509 if (GET_MODE (offset_rtx) != Pmode) 7510 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); 7511#else 7512 if (GET_MODE (offset_rtx) != ptr_mode) 7513 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); 7514#endif 7515 7516 if (GET_MODE (op0) == BLKmode 7517 /* A constant address in OP0 can have VOIDmode; we must 7518 not try to call force_reg in that case. */ 7519 && GET_MODE (XEXP (op0, 0)) != VOIDmode 7520 && bitsize != 0 7521 && (bitpos % bitsize) == 0 7522 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 7523 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1)) 7524 { 7525 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); 7526 bitpos = 0; 7527 } 7528 7529 op0 = offset_address (op0, offset_rtx, 7530 highest_pow2_factor (offset)); 7531 } 7532 7533 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, 7534 record its alignment as BIGGEST_ALIGNMENT. */ 7535 if (MEM_P (op0) && bitpos == 0 && offset != 0 7536 && is_aligning_offset (offset, tem)) 7537 set_mem_align (op0, BIGGEST_ALIGNMENT); 7538 7539 /* Don't forget about volatility even if this is a bitfield. */ 7540 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0)) 7541 { 7542 if (op0 == orig_op0) 7543 op0 = copy_rtx (op0); 7544 7545 MEM_VOLATILE_P (op0) = 1; 7546 } 7547 7548 /* The following code doesn't handle CONCAT. 7549 Assume only bitpos == 0 can be used for CONCAT, due to 7550 one-element arrays having the same mode as their element. */ 7551 if (GET_CODE (op0) == CONCAT) 7552 { 7553 gcc_assert (bitpos == 0 7554 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))); 7555 return op0; 7556 } 7557 7558 /* In cases where an aligned union has an unaligned object 7559 as a field, we might be extracting a BLKmode value from 7560 an integer-mode (e.g., SImode) object. 
Handle this case 7561 by doing the extract into an object as wide as the field 7562 (which we know to be the width of a basic mode), then 7563 storing into memory, and changing the mode to BLKmode. */ 7564 if (mode1 == VOIDmode 7565 || REG_P (op0) || GET_CODE (op0) == SUBREG 7566 || (mode1 != BLKmode && ! direct_load[(int) mode1] 7567 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT 7568 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT 7569 && modifier != EXPAND_CONST_ADDRESS 7570 && modifier != EXPAND_INITIALIZER) 7571 /* If the field isn't aligned enough to fetch as a memref, 7572 fetch it as a bit field. */ 7573 || (mode1 != BLKmode 7574 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode) 7575 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0) 7576 || (MEM_P (op0) 7577 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1) 7578 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)))) 7579 && ((modifier == EXPAND_CONST_ADDRESS 7580 || modifier == EXPAND_INITIALIZER) 7581 ? STRICT_ALIGNMENT 7582 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))) 7583 || (bitpos % BITS_PER_UNIT != 0))) 7584 /* If the type and the field are a constant size and the 7585 size of the type isn't the same size as the bitfield, 7586 we must use bitfield operations. */ 7587 || (bitsize >= 0 7588 && TYPE_SIZE (TREE_TYPE (exp)) 7589 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST 7590 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), 7591 bitsize))) 7592 { 7593 enum machine_mode ext_mode = mode; 7594 7595 if (ext_mode == BLKmode 7596 && ! (target != 0 && MEM_P (op0) 7597 && MEM_P (target) 7598 && bitpos % BITS_PER_UNIT == 0)) 7599 ext_mode = mode_for_size (bitsize, MODE_INT, 1); 7600 7601 if (ext_mode == BLKmode) 7602 { 7603 if (target == 0) 7604 target = assign_temp (type, 0, 1, 1); 7605 7606 if (bitsize == 0) 7607 return target; 7608 7609 /* In this case, BITPOS must start at a byte boundary and 7610 TARGET, if specified, must be a MEM. */ 7611 gcc_assert (MEM_P (op0) 7612 && (!target || MEM_P (target)) 7613 && !(bitpos % BITS_PER_UNIT)); 7614 7615 emit_block_move (target, 7616 adjust_address (op0, VOIDmode, 7617 bitpos / BITS_PER_UNIT), 7618 GEN_INT ((bitsize + BITS_PER_UNIT - 1) 7619 / BITS_PER_UNIT), 7620 (modifier == EXPAND_STACK_PARM 7621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 7622 7623 return target; 7624 } 7625 7626 op0 = validize_mem (op0); 7627 7628 if (MEM_P (op0) && REG_P (XEXP (op0, 0))) 7629 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); 7630 7631 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, 7632 (modifier == EXPAND_STACK_PARM 7633 ? NULL_RTX : target), 7634 ext_mode, ext_mode); 7635 7636 /* If the result is a record type and BITSIZE is narrower than 7637 the mode of OP0, an integral mode, and this is a big endian 7638 machine, we must put the field into the high-order bits. */ 7639 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN 7640 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT 7641 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0))) 7642 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, 7643 size_int (GET_MODE_BITSIZE (GET_MODE (op0)) 7644 - bitsize), 7645 op0, 1); 7646 7647 /* If the result type is BLKmode, store the data into a temporary 7648 of the appropriate type, but with the mode corresponding to the 7649 mode for the data we have (op0's mode). 
It's tempting to make 7650 this a constant type, since we know it's only being stored once, 7651 but that can cause problems if we are taking the address of this 7652 COMPONENT_REF because the MEM of any reference via that address 7653 will have flags corresponding to the type, which will not 7654 necessarily be constant. */ 7655 if (mode == BLKmode) 7656 { 7657 rtx new 7658 = assign_stack_temp_for_type 7659 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type); 7660 7661 emit_move_insn (new, op0); 7662 op0 = copy_rtx (new); 7663 PUT_MODE (op0, BLKmode); 7664 set_mem_attributes (op0, exp, 1); 7665 } 7666 7667 return op0; 7668 } 7669 7670 /* If the result is BLKmode, use that to access the object 7671 now as well. */ 7672 if (mode == BLKmode) 7673 mode1 = BLKmode; 7674 7675 /* Get a reference to just this component. */ 7676 if (modifier == EXPAND_CONST_ADDRESS 7677 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) 7678 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); 7679 else 7680 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); 7681 7682 if (op0 == orig_op0) 7683 op0 = copy_rtx (op0); 7684 7685 set_mem_attributes (op0, exp, 0); 7686 if (REG_P (XEXP (op0, 0))) 7687 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); 7688 7689 MEM_VOLATILE_P (op0) |= volatilep; 7690 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode 7691 || modifier == EXPAND_CONST_ADDRESS 7692 || modifier == EXPAND_INITIALIZER) 7693 return op0; 7694 else if (target == 0) 7695 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); 7696 7697 convert_move (target, op0, unsignedp); 7698 return target; 7699 } 7700 7701 case OBJ_TYPE_REF: 7702 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier); 7703 7704 case CALL_EXPR: 7705 /* Check for a built-in function. */ 7706 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR 7707 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) 7708 == FUNCTION_DECL) 7709 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 7710 { 7711 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) 7712 == BUILT_IN_FRONTEND) 7713 return lang_hooks.expand_expr (exp, original_target, 7714 tmode, modifier, 7715 alt_rtl); 7716 else 7717 return expand_builtin (exp, target, subtarget, tmode, ignore); 7718 } 7719 7720 return expand_call (exp, target, ignore); 7721 7722 case NON_LVALUE_EXPR: 7723 case NOP_EXPR: 7724 case CONVERT_EXPR: 7725 if (TREE_OPERAND (exp, 0) == error_mark_node) 7726 return const0_rtx; 7727 7728 if (TREE_CODE (type) == UNION_TYPE) 7729 { 7730 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); 7731 7732 /* If both input and output are BLKmode, this conversion isn't doing 7733 anything except possibly changing memory attribute. */ 7734 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) 7735 { 7736 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode, 7737 modifier); 7738 7739 result = copy_rtx (result); 7740 set_mem_attributes (result, exp, 0); 7741 return result; 7742 } 7743 7744 if (target == 0) 7745 { 7746 if (TYPE_MODE (type) != BLKmode) 7747 target = gen_reg_rtx (TYPE_MODE (type)); 7748 else 7749 target = assign_temp (type, 0, 1, 1); 7750 } 7751 7752 if (MEM_P (target)) 7753 /* Store data into beginning of memory target. */ 7754 store_expr (TREE_OPERAND (exp, 0), 7755 adjust_address (target, TYPE_MODE (valtype), 0), 7756 modifier == EXPAND_STACK_PARM); 7757 7758 else 7759 { 7760 gcc_assert (REG_P (target)); 7761 7762 /* Store this field into a union of the proper type. 
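   (Editor's note with a hypothetical example, not part of the original
   comment -- GCC's cast-to-union extension is a typical way to reach
   this path:

       union u { int i; float f; };
       union u wrap (int x)
       {
         return (union u) x;   // GNU extension: stores x into member i
       }

   When the union lives in a register, store_field below writes just the
   member's bits at offset zero of that register.)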
*/ 7763 store_field (target, 7764 MIN ((int_size_in_bytes (TREE_TYPE 7765 (TREE_OPERAND (exp, 0))) 7766 * BITS_PER_UNIT), 7767 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), 7768 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), 7769 type, 0); 7770 } 7771 7772 /* Return the entire union. */ 7773 return target; 7774 } 7775 7776 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) 7777 { 7778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 7779 modifier); 7780 7781 /* If the signedness of the conversion differs and OP0 is 7782 a promoted SUBREG, clear that indication since we now 7783 have to do the proper extension. */ 7784 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp 7785 && GET_CODE (op0) == SUBREG) 7786 SUBREG_PROMOTED_VAR_P (op0) = 0; 7787 7788 return REDUCE_BIT_FIELD (op0); 7789 } 7790 7791 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 7792 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier); 7793 if (GET_MODE (op0) == mode) 7794 ; 7795 7796 /* If OP0 is a constant, just convert it into the proper mode. */ 7797 else if (CONSTANT_P (op0)) 7798 { 7799 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7800 enum machine_mode inner_mode = TYPE_MODE (inner_type); 7801 7802 if (modifier == EXPAND_INITIALIZER) 7803 op0 = simplify_gen_subreg (mode, op0, inner_mode, 7804 subreg_lowpart_offset (mode, 7805 inner_mode)); 7806 else 7807 op0= convert_modes (mode, inner_mode, op0, 7808 TYPE_UNSIGNED (inner_type)); 7809 } 7810 7811 else if (modifier == EXPAND_INITIALIZER) 7812 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); 7813 7814 else if (target == 0) 7815 op0 = convert_to_mode (mode, op0, 7816 TYPE_UNSIGNED (TREE_TYPE 7817 (TREE_OPERAND (exp, 0)))); 7818 else 7819 { 7820 convert_move (target, op0, 7821 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7822 op0 = target; 7823 } 7824 7825 return REDUCE_BIT_FIELD (op0); 7826 7827 case VIEW_CONVERT_EXPR: 7828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); 7829 7830 /* If the input and output modes are both the same, we are done. */ 7831 if (TYPE_MODE (type) == GET_MODE (op0)) 7832 ; 7833 /* If neither mode is BLKmode, and both modes are the same size 7834 then we can use gen_lowpart. */ 7835 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode 7836 && GET_MODE_SIZE (TYPE_MODE (type)) 7837 == GET_MODE_SIZE (GET_MODE (op0))) 7838 { 7839 if (GET_CODE (op0) == SUBREG) 7840 op0 = force_reg (GET_MODE (op0), op0); 7841 op0 = gen_lowpart (TYPE_MODE (type), op0); 7842 } 7843 /* If both modes are integral, then we can convert from one to the 7844 other. */ 7845 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) 7846 && SCALAR_INT_MODE_P (TYPE_MODE (type))) 7847 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0, 7848 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7849 /* As a last resort, spill op0 to memory, and reload it in a 7850 different mode. */ 7851 else if (!MEM_P (op0)) 7852 { 7853 /* If the operand is not a MEM, force it into memory. Since we 7854 are going to be changing the mode of the MEM, don't call 7855 force_const_mem for constants because we don't allow pool 7856 constants to change mode. 
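   (Editor's aside -- a user-level analogue of this spill-and-reload
   fallback; hypothetical code, not from GCC itself:

       struct pair { int lo, hi; };
       struct pair as_pair (double d)   // like a VIEW_CONVERT of d
       {
         struct pair p;
         __builtin_memcpy (&p, &d, sizeof p);  // write one mode, read another
         return p;
       }

   The stack temporary allocated below plays the role of p.)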
*/ 7857 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7858 7859 gcc_assert (!TREE_ADDRESSABLE (exp)); 7860 7861 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) 7862 target 7863 = assign_stack_temp_for_type 7864 (TYPE_MODE (inner_type), 7865 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type); 7866 7867 emit_move_insn (target, op0); 7868 op0 = target; 7869 } 7870 7871 /* At this point, OP0 is in the correct mode. If the output type is such 7872 that the operand is known to be aligned, indicate that it is. 7873 Otherwise, we need only be concerned about alignment for non-BLKmode 7874 results. */ 7875 if (MEM_P (op0)) 7876 { 7877 op0 = copy_rtx (op0); 7878 7879 if (TYPE_ALIGN_OK (type)) 7880 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); 7881 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT 7882 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) 7883 { 7884 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 7885 HOST_WIDE_INT temp_size 7886 = MAX (int_size_in_bytes (inner_type), 7887 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type))); 7888 rtx new = assign_stack_temp_for_type (TYPE_MODE (type), 7889 temp_size, 0, type); 7890 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); 7891 7892 gcc_assert (!TREE_ADDRESSABLE (exp)); 7893 7894 if (GET_MODE (op0) == BLKmode) 7895 emit_block_move (new_with_op0_mode, op0, 7896 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))), 7897 (modifier == EXPAND_STACK_PARM 7898 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); 7899 else 7900 emit_move_insn (new_with_op0_mode, op0); 7901 7902 op0 = new; 7903 } 7904 7905 op0 = adjust_address (op0, TYPE_MODE (type), 0); 7906 } 7907 7908 return op0; 7909 7910 case PLUS_EXPR: 7911 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and 7912 something else, make sure we add the register to the constant and 7913 then to the other thing. This case can occur during strength 7914 reduction and doing it this way will produce better code if the 7915 frame pointer or argument pointer is eliminated. 7916 7917 fold-const.c will ensure that the constant is always in the inner 7918 PLUS_EXPR, so the only case we need to do anything about is if 7919 sp, ap, or fp is our second argument, in which case we must swap 7920 the innermost first argument and our second argument. */ 7921 7922 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR 7923 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST 7924 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL 7925 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx 7926 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx 7927 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx)) 7928 { 7929 tree t = TREE_OPERAND (exp, 1); 7930 7931 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); 7932 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t; 7933 } 7934 7935 /* If the result is to be ptr_mode and we are adding an integer to 7936 something, we might be forming a constant. So try to use 7937 plus_constant. If it produces a sum and we can't accept it, 7938 use force_operand. This allows P = &ARR[const] to generate 7939 efficient code on machines where a SYMBOL_REF is not a valid 7940 address. 7941 7942 If this is an EXPAND_SUM call, always return the sum. */ 7943 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER 7944 || (mode == ptr_mode && (unsignedp || ! 
flag_trapv))) 7945 { 7946 if (modifier == EXPAND_STACK_PARM) 7947 target = 0; 7948 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST 7949 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 7950 && TREE_CONSTANT (TREE_OPERAND (exp, 1))) 7951 { 7952 rtx constant_part; 7953 7954 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, 7955 EXPAND_SUM); 7956 /* Use immed_double_const to ensure that the constant is 7957 truncated according to the mode of OP1, then sign extended 7958 to a HOST_WIDE_INT. Using the constant directly can result 7959 in non-canonical RTL in a 64x32 cross compile. */ 7960 constant_part 7961 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), 7962 (HOST_WIDE_INT) 0, 7963 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); 7964 op1 = plus_constant (op1, INTVAL (constant_part)); 7965 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 7966 op1 = force_operand (op1, target); 7967 return REDUCE_BIT_FIELD (op1); 7968 } 7969 7970 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST 7971 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 7972 && TREE_CONSTANT (TREE_OPERAND (exp, 0))) 7973 { 7974 rtx constant_part; 7975 7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 7977 (modifier == EXPAND_INITIALIZER 7978 ? EXPAND_INITIALIZER : EXPAND_SUM)); 7979 if (! CONSTANT_P (op0)) 7980 { 7981 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, 7982 VOIDmode, modifier); 7983 /* Return a PLUS if modifier says it's OK. */ 7984 if (modifier == EXPAND_SUM 7985 || modifier == EXPAND_INITIALIZER) 7986 return simplify_gen_binary (PLUS, mode, op0, op1); 7987 goto binop2; 7988 } 7989 /* Use immed_double_const to ensure that the constant is 7990 truncated according to the mode of OP1, then sign extended 7991 to a HOST_WIDE_INT. Using the constant directly can result 7992 in non-canonical RTL in a 64x32 cross compile. */ 7993 constant_part 7994 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), 7995 (HOST_WIDE_INT) 0, 7996 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); 7997 op0 = plus_constant (op0, INTVAL (constant_part)); 7998 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 7999 op0 = force_operand (op0, target); 8000 return REDUCE_BIT_FIELD (op0); 8001 } 8002 } 8003 8004 /* No sense saving up arithmetic to be done 8005 if it's all in the wrong mode to form part of an address. 8006 And force_operand won't know whether to sign-extend or 8007 zero-extend. */ 8008 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 8009 || mode != ptr_mode) 8010 { 8011 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8012 subtarget, &op0, &op1, 0); 8013 if (op0 == const0_rtx) 8014 return op1; 8015 if (op1 == const0_rtx) 8016 return op0; 8017 goto binop2; 8018 } 8019 8020 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8021 subtarget, &op0, &op1, modifier); 8022 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); 8023 8024 case MINUS_EXPR: 8025 /* For initializers, we are allowed to return a MINUS of two 8026 symbolic constants. Here we handle all cases when both operands 8027 are constant. */ 8028 /* Handle difference of two symbolic constants, 8029 for the sake of an initializer. 
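   (Editor's sketch, not in the original file -- the labels-as-values
   extension is a classic source of such symbolic differences in
   initializers:

       int dispatch (int i)             // hypothetical threaded dispatcher
       {
         static const int offs[]
           = { &&L0 - &&L0, &&L1 - &&L0 };    // MINUS of two label addresses
         goto *((char *) &&L0 + offs[i]);
       L0: return 0;
       L1: return 1;
       }

   The assembler resolves each difference, so expansion can legitimately
   return a MINUS rtx instead of a folded constant.)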
*/ 8030 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) 8031 && really_constant_p (TREE_OPERAND (exp, 0)) 8032 && really_constant_p (TREE_OPERAND (exp, 1))) 8033 { 8034 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8035 NULL_RTX, &op0, &op1, modifier); 8036 8037 /* If the last operand is a CONST_INT, use plus_constant of 8038 the negated constant. Else make the MINUS. */ 8039 if (GET_CODE (op1) == CONST_INT) 8040 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1))); 8041 else 8042 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1)); 8043 } 8044 8045 /* No sense saving up arithmetic to be done 8046 if it's all in the wrong mode to form part of an address. 8047 And force_operand won't know whether to sign-extend or 8048 zero-extend. */ 8049 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) 8050 || mode != ptr_mode) 8051 goto binop; 8052 8053 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8054 subtarget, &op0, &op1, modifier); 8055 8056 /* Convert A - const to A + (-const). */ 8057 if (GET_CODE (op1) == CONST_INT) 8058 { 8059 op1 = negate_rtx (mode, op1); 8060 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); 8061 } 8062 8063 goto binop2; 8064 8065 case MULT_EXPR: 8066 /* If first operand is constant, swap them. 8067 Thus the following special case checks need only 8068 check the second operand. */ 8069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) 8070 { 8071 tree t1 = TREE_OPERAND (exp, 0); 8072 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); 8073 TREE_OPERAND (exp, 1) = t1; 8074 } 8075 8076 /* Attempt to return something suitable for generating an 8077 indexed address, for machines that support that. */ 8078 8079 if (modifier == EXPAND_SUM && mode == ptr_mode 8080 && host_integerp (TREE_OPERAND (exp, 1), 0)) 8081 { 8082 tree exp1 = TREE_OPERAND (exp, 1); 8083 8084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 8085 EXPAND_SUM); 8086 8087 if (!REG_P (op0)) 8088 op0 = force_operand (op0, NULL_RTX); 8089 if (!REG_P (op0)) 8090 op0 = copy_to_mode_reg (mode, op0); 8091 8092 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0, 8093 gen_int_mode (tree_low_cst (exp1, 0), 8094 TYPE_MODE (TREE_TYPE (exp1))))); 8095 } 8096 8097 if (modifier == EXPAND_STACK_PARM) 8098 target = 0; 8099 8100 /* Check for multiplying things that have been extended 8101 from a narrower type. If this machine supports multiplying 8102 in that narrower type with a result in the desired type, 8103 do it that way, and avoid the explicit type-conversion. */ 8104 8105 subexp0 = TREE_OPERAND (exp, 0); 8106 subexp1 = TREE_OPERAND (exp, 1); 8107 /* First, check if we have a multiplication of one signed and one 8108 unsigned operand. 
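   (Editor's example, assuming the target provides a usmul
   widening-multiply pattern:

       long long mixed (int a, unsigned int b)   // hypothetical
       {
         return (long long) a * b;   // 32x32 -> 64, one signed, one unsigned
       }

   Both operands are NOP_EXPRs from the same narrower precision with
   opposite signedness, so usmul_widen_optab below can emit a single
   widening multiply.)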
*/ 8109 if (TREE_CODE (subexp0) == NOP_EXPR 8110 && TREE_CODE (subexp1) == NOP_EXPR 8111 && TREE_CODE (type) == INTEGER_TYPE 8112 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8113 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) 8114 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8115 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0)))) 8116 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))) 8117 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0))))) 8118 { 8119 enum machine_mode innermode 8120 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0))); 8121 this_optab = usmul_widen_optab; 8122 if (mode == GET_MODE_WIDER_MODE (innermode)) 8123 { 8124 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) 8125 { 8126 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))) 8127 expand_operands (TREE_OPERAND (subexp0, 0), 8128 TREE_OPERAND (subexp1, 0), 8129 NULL_RTX, &op0, &op1, 0); 8130 else 8131 expand_operands (TREE_OPERAND (subexp0, 0), 8132 TREE_OPERAND (subexp1, 0), 8133 NULL_RTX, &op1, &op0, 0); 8134 8135 goto binop3; 8136 } 8137 } 8138 } 8139 /* Check for a multiplication with matching signedness. */ 8140 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR 8141 && TREE_CODE (type) == INTEGER_TYPE 8142 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 8143 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) 8144 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST 8145 && int_fits_type_p (TREE_OPERAND (exp, 1), 8146 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) 8147 /* Don't use a widening multiply if a shift will do. */ 8148 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) 8149 > HOST_BITS_PER_WIDE_INT) 8150 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) 8151 || 8152 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR 8153 && (TYPE_PRECISION (TREE_TYPE 8154 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) 8155 == TYPE_PRECISION (TREE_TYPE 8156 (TREE_OPERAND 8157 (TREE_OPERAND (exp, 0), 0)))) 8158 /* If both operands are extended, they must either both 8159 be zero-extended or both be sign-extended. */ 8160 && (TYPE_UNSIGNED (TREE_TYPE 8161 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) 8162 == TYPE_UNSIGNED (TREE_TYPE 8163 (TREE_OPERAND 8164 (TREE_OPERAND (exp, 0), 0))))))) 8165 { 8166 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); 8167 enum machine_mode innermode = TYPE_MODE (op0type); 8168 bool zextend_p = TYPE_UNSIGNED (op0type); 8169 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; 8170 this_optab = zextend_p ? 
umul_widen_optab : smul_widen_optab; 8171 8172 if (mode == GET_MODE_2XWIDER_MODE (innermode)) 8173 { 8174 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) 8175 { 8176 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) 8177 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), 8178 TREE_OPERAND (exp, 1), 8179 NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8180 else 8181 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), 8182 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 8183 NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8184 goto binop3; 8185 } 8186 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing 8187 && innermode == word_mode) 8188 { 8189 rtx htem, hipart; 8190 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); 8191 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) 8192 op1 = convert_modes (innermode, mode, 8193 expand_normal (TREE_OPERAND (exp, 1)), 8194 unsignedp); 8195 else 8196 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)); 8197 temp = expand_binop (mode, other_optab, op0, op1, target, 8198 unsignedp, OPTAB_LIB_WIDEN); 8199 hipart = gen_highpart (innermode, temp); 8200 htem = expand_mult_highpart_adjust (innermode, hipart, 8201 op0, op1, hipart, 8202 zextend_p); 8203 if (htem != hipart) 8204 emit_move_insn (hipart, htem); 8205 return REDUCE_BIT_FIELD (temp); 8206 } 8207 } 8208 } 8209 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8210 subtarget, &op0, &op1, 0); 8211 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); 8212 8213 case TRUNC_DIV_EXPR: 8214 case FLOOR_DIV_EXPR: 8215 case CEIL_DIV_EXPR: 8216 case ROUND_DIV_EXPR: 8217 case EXACT_DIV_EXPR: 8218 if (modifier == EXPAND_STACK_PARM) 8219 target = 0; 8220 /* Possible optimization: compute the dividend with EXPAND_SUM 8221 then if the divisor is constant can optimize the case 8222 where some terms of the dividend have coeffs divisible by it. */ 8223 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8224 subtarget, &op0, &op1, 0); 8225 return expand_divmod (0, code, mode, op0, op1, target, unsignedp); 8226 8227 case RDIV_EXPR: 8228 goto binop; 8229 8230 case TRUNC_MOD_EXPR: 8231 case FLOOR_MOD_EXPR: 8232 case CEIL_MOD_EXPR: 8233 case ROUND_MOD_EXPR: 8234 if (modifier == EXPAND_STACK_PARM) 8235 target = 0; 8236 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8237 subtarget, &op0, &op1, 0); 8238 return expand_divmod (1, code, mode, op0, op1, target, unsignedp); 8239 8240 case FIX_ROUND_EXPR: 8241 case FIX_FLOOR_EXPR: 8242 case FIX_CEIL_EXPR: 8243 gcc_unreachable (); /* Not used for C. */ 8244 8245 case FIX_TRUNC_EXPR: 8246 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8247 if (target == 0 || modifier == EXPAND_STACK_PARM) 8248 target = gen_reg_rtx (mode); 8249 expand_fix (target, op0, unsignedp); 8250 return target; 8251 8252 case FLOAT_EXPR: 8253 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8254 if (target == 0 || modifier == EXPAND_STACK_PARM) 8255 target = gen_reg_rtx (mode); 8256 /* expand_float can't figure out what to do if FROM has VOIDmode. 8257 So give it the correct mode. With -O, cse will optimize this. 
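   (Editor's note -- for a conversion such as the hypothetical

       double tofp (int x) { return x; }   // FLOAT_EXPR, int to double

   if the operand happens to expand to a bare integer constant, that
   CONST_INT carries VOIDmode; the copy below gives it the operand
   type's mode, SImode here, so expand_float can pick the right
   pattern.)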
*/ 8258 if (GET_MODE (op0) == VOIDmode) 8259 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), 8260 op0); 8261 expand_float (target, op0, 8262 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); 8263 return target; 8264 8265 case NEGATE_EXPR: 8266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8267 if (modifier == EXPAND_STACK_PARM) 8268 target = 0; 8269 temp = expand_unop (mode, 8270 optab_for_tree_code (NEGATE_EXPR, type), 8271 op0, target, 0); 8272 gcc_assert (temp); 8273 return REDUCE_BIT_FIELD (temp); 8274 8275 case ABS_EXPR: 8276 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8277 if (modifier == EXPAND_STACK_PARM) 8278 target = 0; 8279 8280 /* ABS_EXPR is not valid for complex arguments. */ 8281 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT 8282 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); 8283 8284 /* Unsigned abs is simply the operand. Testing here means we don't 8285 risk generating incorrect code below. */ 8286 if (TYPE_UNSIGNED (type)) 8287 return op0; 8288 8289 return expand_abs (mode, op0, target, unsignedp, 8290 safe_from_p (target, TREE_OPERAND (exp, 0), 1)); 8291 8292 case MAX_EXPR: 8293 case MIN_EXPR: 8294 target = original_target; 8295 if (target == 0 8296 || modifier == EXPAND_STACK_PARM 8297 || (MEM_P (target) && MEM_VOLATILE_P (target)) 8298 || GET_MODE (target) != mode 8299 || (REG_P (target) 8300 && REGNO (target) < FIRST_PSEUDO_REGISTER)) 8301 target = gen_reg_rtx (mode); 8302 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8303 target, &op0, &op1, 0); 8304 8305 /* First try to do it with a special MIN or MAX instruction. 8306 If that does not win, use a conditional jump to select the proper 8307 value. */ 8308 this_optab = optab_for_tree_code (code, type); 8309 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, 8310 OPTAB_WIDEN); 8311 if (temp != 0) 8312 return temp; 8313 8314 /* At this point, a MEM target is no longer useful; we will get better 8315 code without it. */ 8316 8317 if (! REG_P (target)) 8318 target = gen_reg_rtx (mode); 8319 8320 /* If op1 was placed in target, swap op0 and op1. */ 8321 if (target != op0 && target == op1) 8322 { 8323 temp = op0; 8324 op0 = op1; 8325 op1 = temp; 8326 } 8327 8328 /* We generate better code and avoid problems with op1 mentioning 8329 target by forcing op1 into a pseudo if it isn't a constant. */ 8330 if (! CONSTANT_P (op1)) 8331 op1 = force_reg (mode, op1); 8332 8333 { 8334 enum rtx_code comparison_code; 8335 rtx cmpop1 = op1; 8336 8337 if (code == MAX_EXPR) 8338 comparison_code = unsignedp ? GEU : GE; 8339 else 8340 comparison_code = unsignedp ? LEU : LE; 8341 8342 /* Canonicalize to comparisons against 0. */ 8343 if (op1 == const1_rtx) 8344 { 8345 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1) 8346 or (a != 0 ? a : 1) for unsigned. 8347 For MIN we are safe converting (a <= 1 ? a : 1) 8348 into (a <= 0 ? a : 1) */ 8349 cmpop1 = const0_rtx; 8350 if (code == MAX_EXPR) 8351 comparison_code = unsignedp ? NE : GT; 8352 } 8353 if (op1 == constm1_rtx && !unsignedp) 8354 { 8355 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1) 8356 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */ 8357 cmpop1 = const0_rtx; 8358 if (code == MIN_EXPR) 8359 comparison_code = LT; 8360 } 8361#ifdef HAVE_conditional_move 8362 /* Use a conditional move if possible. */ 8363 if (can_conditionally_move_p (mode)) 8364 { 8365 rtx insn; 8366 8367 /* ??? 
Same problem as in expmed.c: emit_conditional_move 8368 forces a stack adjustment via compare_from_rtx, and we 8369 lose the stack adjustment if the sequence we are about 8370 to create is discarded. */ 8371 do_pending_stack_adjust (); 8372 8373 start_sequence (); 8374 8375 /* Try to emit the conditional move. */ 8376 insn = emit_conditional_move (target, comparison_code, 8377 op0, cmpop1, mode, 8378 op0, op1, mode, 8379 unsignedp); 8380 8381 /* If we could do the conditional move, emit the sequence, 8382 and return. */ 8383 if (insn) 8384 { 8385 rtx seq = get_insns (); 8386 end_sequence (); 8387 emit_insn (seq); 8388 return target; 8389 } 8390 8391 /* Otherwise discard the sequence and fall back to code with 8392 branches. */ 8393 end_sequence (); 8394 } 8395#endif 8396 if (target != op0) 8397 emit_move_insn (target, op0); 8398 8399 temp = gen_label_rtx (); 8400 do_compare_rtx_and_jump (target, cmpop1, comparison_code, 8401 unsignedp, mode, NULL_RTX, NULL_RTX, temp); 8402 } 8403 emit_move_insn (target, op1); 8404 emit_label (temp); 8405 return target; 8406 8407 case BIT_NOT_EXPR: 8408 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8409 if (modifier == EXPAND_STACK_PARM) 8410 target = 0; 8411 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); 8412 gcc_assert (temp); 8413 return temp; 8414 8415 /* ??? Can optimize bitwise operations with one arg constant. 8416 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) 8417 and (a bitwise1 b) bitwise2 b (etc) 8418 but that is probably not worth while. */ 8419 8420 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two 8421 boolean values when we want in all cases to compute both of them. In 8422 general it is fastest to do TRUTH_AND_EXPR by computing both operands 8423 as actual zero-or-1 values and then bitwise anding. In cases where 8424 there cannot be any side effects, better code would be made by 8425 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is 8426 how to recognize those cases. */ 8427 8428 case TRUTH_AND_EXPR: 8429 code = BIT_AND_EXPR; 8430 case BIT_AND_EXPR: 8431 goto binop; 8432 8433 case TRUTH_OR_EXPR: 8434 code = BIT_IOR_EXPR; 8435 case BIT_IOR_EXPR: 8436 goto binop; 8437 8438 case TRUTH_XOR_EXPR: 8439 code = BIT_XOR_EXPR; 8440 case BIT_XOR_EXPR: 8441 goto binop; 8442 8443 case LSHIFT_EXPR: 8444 case RSHIFT_EXPR: 8445 case LROTATE_EXPR: 8446 case RROTATE_EXPR: 8447 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) 8448 subtarget = 0; 8449 if (modifier == EXPAND_STACK_PARM) 8450 target = 0; 8451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); 8452 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, 8453 unsignedp); 8454 8455 /* Could determine the answer when only additive constants differ. Also, 8456 the addition of one can be handled by changing the condition. */ 8457 case LT_EXPR: 8458 case LE_EXPR: 8459 case GT_EXPR: 8460 case GE_EXPR: 8461 case EQ_EXPR: 8462 case NE_EXPR: 8463 case UNORDERED_EXPR: 8464 case ORDERED_EXPR: 8465 case UNLT_EXPR: 8466 case UNLE_EXPR: 8467 case UNGT_EXPR: 8468 case UNGE_EXPR: 8469 case UNEQ_EXPR: 8470 case LTGT_EXPR: 8471 temp = do_store_flag (exp, 8472 modifier != EXPAND_STACK_PARM ? target : NULL_RTX, 8473 tmode != VOIDmode ? tmode : mode, 0); 8474 if (temp != 0) 8475 return temp; 8476 8477 /* For foo != 0, load foo, and if it is nonzero load 1 instead. 
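   (Editor's sketch of the fallback emitted below, for the hypothetical

       int isnz (int foo) { return foo != 0; }

   when no usable scc instruction exists:

       temp = foo;
       if (temp == 0) goto done;   // emit_cmp_and_jump_insns
       temp = 1;
     done:
       return temp;

   A constant operand short-circuits all of this and just loads 0 or 1.)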
*/ 8478 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) 8479 && original_target 8480 && REG_P (original_target) 8481 && (GET_MODE (original_target) 8482 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) 8483 { 8484 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, 8485 VOIDmode, 0); 8486 8487 /* If temp is constant, we can just compute the result. */ 8488 if (GET_CODE (temp) == CONST_INT) 8489 { 8490 if (INTVAL (temp) != 0) 8491 emit_move_insn (target, const1_rtx); 8492 else 8493 emit_move_insn (target, const0_rtx); 8494 8495 return target; 8496 } 8497 8498 if (temp != original_target) 8499 { 8500 enum machine_mode mode1 = GET_MODE (temp); 8501 if (mode1 == VOIDmode) 8502 mode1 = tmode != VOIDmode ? tmode : mode; 8503 8504 temp = copy_to_mode_reg (mode1, temp); 8505 } 8506 8507 op1 = gen_label_rtx (); 8508 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, 8509 GET_MODE (temp), unsignedp, op1); 8510 emit_move_insn (temp, const1_rtx); 8511 emit_label (op1); 8512 return temp; 8513 } 8514 8515 /* If no set-flag instruction, must generate a conditional store 8516 into a temporary variable. Drop through and handle this 8517 like && and ||. */ 8518 8519 if (! ignore 8520 && (target == 0 8521 || modifier == EXPAND_STACK_PARM 8522 || ! safe_from_p (target, exp, 1) 8523 /* Make sure we don't have a hard reg (such as function's return 8524 value) live across basic blocks, if not optimizing. */ 8525 || (!optimize && REG_P (target) 8526 && REGNO (target) < FIRST_PSEUDO_REGISTER))) 8527 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); 8528 8529 if (target) 8530 emit_move_insn (target, const0_rtx); 8531 8532 op1 = gen_label_rtx (); 8533 jumpifnot (exp, op1); 8534 8535 if (target) 8536 emit_move_insn (target, const1_rtx); 8537 8538 emit_label (op1); 8539 return ignore ? const0_rtx : target; 8540 8541 case TRUTH_NOT_EXPR: 8542 if (modifier == EXPAND_STACK_PARM) 8543 target = 0; 8544 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); 8545 /* The parser is careful to generate TRUTH_NOT_EXPR 8546 only with operands that are always zero or one. */ 8547 temp = expand_binop (mode, xor_optab, op0, const1_rtx, 8548 target, 1, OPTAB_LIB_WIDEN); 8549 gcc_assert (temp); 8550 return temp; 8551 8552 case STATEMENT_LIST: 8553 { 8554 tree_stmt_iterator iter; 8555 8556 gcc_assert (ignore); 8557 8558 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) 8559 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); 8560 } 8561 return const0_rtx; 8562 8563 case COND_EXPR: 8564 /* A COND_EXPR with its type being VOID_TYPE represents a 8565 conditional jump and is handled in 8566 expand_gimple_cond_expr. */ 8567 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp))); 8568 8569 /* Note that COND_EXPRs whose type is a structure or union 8570 are required to be constructed to contain assignments of 8571 a temporary variable, so that we can evaluate them here 8572 for side effect only. If type is void, we must do likewise. */ 8573 8574 gcc_assert (!TREE_ADDRESSABLE (type) 8575 && !ignore 8576 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node 8577 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node); 8578 8579 /* If we are not to produce a result, we have no target. Otherwise, 8580 if a target was specified use it; it will not be used as an 8581 intermediate target unless it is safe. If no target, use a 8582 temporary. 
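   (Editor's illustration, hypothetical source, not in the original
   comment:

       int pick (int c, int a, int b) { return c ? a + 1 : b - 1; }

   expands below essentially as:

       jumpifnot c -> L0;
       temp = a + 1; goto L1;
     L0: temp = b - 1;
     L1: result is temp;
   )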
*/ 8583 8584 if (modifier != EXPAND_STACK_PARM 8585 && original_target 8586 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) 8587 && GET_MODE (original_target) == mode 8588#ifdef HAVE_conditional_move 8589 && (! can_conditionally_move_p (mode) 8590 || REG_P (original_target)) 8591#endif 8592 && !MEM_P (original_target)) 8593 temp = original_target; 8594 else 8595 temp = assign_temp (type, 0, 0, 1); 8596 8597 do_pending_stack_adjust (); 8598 NO_DEFER_POP; 8599 op0 = gen_label_rtx (); 8600 op1 = gen_label_rtx (); 8601 jumpifnot (TREE_OPERAND (exp, 0), op0); 8602 store_expr (TREE_OPERAND (exp, 1), temp, 8603 modifier == EXPAND_STACK_PARM); 8604 8605 emit_jump_insn (gen_jump (op1)); 8606 emit_barrier (); 8607 emit_label (op0); 8608 store_expr (TREE_OPERAND (exp, 2), temp, 8609 modifier == EXPAND_STACK_PARM); 8610 8611 emit_label (op1); 8612 OK_DEFER_POP; 8613 return temp; 8614 8615 case VEC_COND_EXPR: 8616 target = expand_vec_cond_expr (exp, target); 8617 return target; 8618 8619 case MODIFY_EXPR: 8620 { 8621 tree lhs = TREE_OPERAND (exp, 0); 8622 tree rhs = TREE_OPERAND (exp, 1); 8623 8624 gcc_assert (ignore); 8625 8626 /* Check for |= or &= of a bitfield of size one into another bitfield 8627 of size 1. In this case, (unless we need the result of the 8628 assignment) we can do this more efficiently with a 8629 test followed by an assignment, if necessary. 8630 8631 ??? At this point, we can't get a BIT_FIELD_REF here. But if 8632 things change so we do, this code should be enhanced to 8633 support it. */ 8634 if (TREE_CODE (lhs) == COMPONENT_REF 8635 && (TREE_CODE (rhs) == BIT_IOR_EXPR 8636 || TREE_CODE (rhs) == BIT_AND_EXPR) 8637 && TREE_OPERAND (rhs, 0) == lhs 8638 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF 8639 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) 8640 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) 8641 { 8642 rtx label = gen_label_rtx (); 8643 int value = TREE_CODE (rhs) == BIT_IOR_EXPR; 8644 do_jump (TREE_OPERAND (rhs, 1), 8645 value ? label : 0, 8646 value ? 0 : label); 8647 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value)); 8648 do_pending_stack_adjust (); 8649 emit_label (label); 8650 return const0_rtx; 8651 } 8652 8653 expand_assignment (lhs, rhs); 8654 8655 return const0_rtx; 8656 } 8657 8658 case RETURN_EXPR: 8659 if (!TREE_OPERAND (exp, 0)) 8660 expand_null_return (); 8661 else 8662 expand_return (TREE_OPERAND (exp, 0)); 8663 return const0_rtx; 8664 8665 case ADDR_EXPR: 8666 return expand_expr_addr_expr (exp, target, tmode, modifier); 8667 8668 case COMPLEX_EXPR: 8669 /* Get the rtx code of the operands. */ 8670 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8671 op1 = expand_normal (TREE_OPERAND (exp, 1)); 8672 8673 if (!target) 8674 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); 8675 8676 /* Move the real (op0) and imaginary (op1) parts to their location. */ 8677 write_complex_part (target, op0, false); 8678 write_complex_part (target, op1, true); 8679 8680 return target; 8681 8682 case REALPART_EXPR: 8683 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8684 return read_complex_part (op0, false); 8685 8686 case IMAGPART_EXPR: 8687 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8688 return read_complex_part (op0, true); 8689 8690 case RESX_EXPR: 8691 expand_resx_expr (exp); 8692 return const0_rtx; 8693 8694 case TRY_CATCH_EXPR: 8695 case CATCH_EXPR: 8696 case EH_FILTER_EXPR: 8697 case TRY_FINALLY_EXPR: 8698 /* Lowered by tree-eh.c. 
*/ 8699 gcc_unreachable (); 8700 8701 case WITH_CLEANUP_EXPR: 8702 case CLEANUP_POINT_EXPR: 8703 case TARGET_EXPR: 8704 case CASE_LABEL_EXPR: 8705 case VA_ARG_EXPR: 8706 case BIND_EXPR: 8707 case INIT_EXPR: 8708 case CONJ_EXPR: 8709 case COMPOUND_EXPR: 8710 case PREINCREMENT_EXPR: 8711 case PREDECREMENT_EXPR: 8712 case POSTINCREMENT_EXPR: 8713 case POSTDECREMENT_EXPR: 8714 case LOOP_EXPR: 8715 case EXIT_EXPR: 8716 case TRUTH_ANDIF_EXPR: 8717 case TRUTH_ORIF_EXPR: 8718 /* Lowered by gimplify.c. */ 8719 gcc_unreachable (); 8720 8721 case EXC_PTR_EXPR: 8722 return get_exception_pointer (cfun); 8723 8724 case FILTER_EXPR: 8725 return get_exception_filter (cfun); 8726 8727 case FDESC_EXPR: 8728 /* Function descriptors are not valid except for as 8729 initialization constants, and should not be expanded. */ 8730 gcc_unreachable (); 8731 8732 case SWITCH_EXPR: 8733 expand_case (exp); 8734 return const0_rtx; 8735 8736 case LABEL_EXPR: 8737 expand_label (TREE_OPERAND (exp, 0)); 8738 return const0_rtx; 8739 8740 case ASM_EXPR: 8741 expand_asm_expr (exp); 8742 return const0_rtx; 8743 8744 case WITH_SIZE_EXPR: 8745 /* WITH_SIZE_EXPR expands to its first argument. The caller should 8746 have pulled out the size to use in whatever context it needed. */ 8747 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode, 8748 modifier, alt_rtl); 8749 8750 case REALIGN_LOAD_EXPR: 8751 { 8752 tree oprnd0 = TREE_OPERAND (exp, 0); 8753 tree oprnd1 = TREE_OPERAND (exp, 1); 8754 tree oprnd2 = TREE_OPERAND (exp, 2); 8755 rtx op2; 8756 8757 this_optab = optab_for_tree_code (code, type); 8758 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8759 op2 = expand_normal (oprnd2); 8760 temp = expand_ternary_op (mode, this_optab, op0, op1, op2, 8761 target, unsignedp); 8762 gcc_assert (temp); 8763 return temp; 8764 } 8765 8766 case DOT_PROD_EXPR: 8767 { 8768 tree oprnd0 = TREE_OPERAND (exp, 0); 8769 tree oprnd1 = TREE_OPERAND (exp, 1); 8770 tree oprnd2 = TREE_OPERAND (exp, 2); 8771 rtx op2; 8772 8773 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); 8774 op2 = expand_normal (oprnd2); 8775 target = expand_widen_pattern_expr (exp, op0, op1, op2, 8776 target, unsignedp); 8777 return target; 8778 } 8779 8780 case WIDEN_SUM_EXPR: 8781 { 8782 tree oprnd0 = TREE_OPERAND (exp, 0); 8783 tree oprnd1 = TREE_OPERAND (exp, 1); 8784 8785 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0); 8786 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1, 8787 target, unsignedp); 8788 return target; 8789 } 8790 8791 case REDUC_MAX_EXPR: 8792 case REDUC_MIN_EXPR: 8793 case REDUC_PLUS_EXPR: 8794 { 8795 op0 = expand_normal (TREE_OPERAND (exp, 0)); 8796 this_optab = optab_for_tree_code (code, type); 8797 temp = expand_unop (mode, this_optab, op0, target, unsignedp); 8798 gcc_assert (temp); 8799 return temp; 8800 } 8801 8802 case VEC_LSHIFT_EXPR: 8803 case VEC_RSHIFT_EXPR: 8804 { 8805 target = expand_vec_shift_expr (exp, target); 8806 return target; 8807 } 8808 8809 default: 8810 return lang_hooks.expand_expr (exp, original_target, tmode, 8811 modifier, alt_rtl); 8812 } 8813 8814 /* Here to do an ordinary binary operator. 
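   (Editor's note -- e.g. the hypothetical

       double quot (double x, double y) { return x / y; }   // RDIV_EXPR

   jumps here: both operands are expanded, optab_for_tree_code maps the
   tree code to its optab, and expand_binop emits the DFmode instruction
   or falls back to a libcall via OPTAB_LIB_WIDEN.)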
*/ 8815 binop: 8816 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 8817 subtarget, &op0, &op1, 0); 8818 binop2: 8819 this_optab = optab_for_tree_code (code, type); 8820 binop3: 8821 if (modifier == EXPAND_STACK_PARM) 8822 target = 0; 8823 temp = expand_binop (mode, this_optab, op0, op1, target, 8824 unsignedp, OPTAB_LIB_WIDEN); 8825 gcc_assert (temp); 8826 return REDUCE_BIT_FIELD (temp); 8827} 8828#undef REDUCE_BIT_FIELD 8829 8830/* Subroutine of above: reduce EXP to the precision of TYPE (in the 8831 signedness of TYPE), possibly returning the result in TARGET. */ 8832static rtx 8833reduce_to_bit_field_precision (rtx exp, rtx target, tree type) 8834{ 8835 HOST_WIDE_INT prec = TYPE_PRECISION (type); 8836 if (target && GET_MODE (target) != GET_MODE (exp)) 8837 target = 0; 8838 /* For constant values, reduce using build_int_cst_type. */ 8839 if (GET_CODE (exp) == CONST_INT) 8840 { 8841 HOST_WIDE_INT value = INTVAL (exp); 8842 tree t = build_int_cst_type (type, value); 8843 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL); 8844 } 8845 else if (TYPE_UNSIGNED (type)) 8846 { 8847 rtx mask; 8848 if (prec < HOST_BITS_PER_WIDE_INT) 8849 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0, 8850 GET_MODE (exp)); 8851 else 8852 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1, 8853 ((unsigned HOST_WIDE_INT) 1 8854 << (prec - HOST_BITS_PER_WIDE_INT)) - 1, 8855 GET_MODE (exp)); 8856 return expand_and (GET_MODE (exp), exp, mask, target); 8857 } 8858 else 8859 { 8860 tree count = build_int_cst (NULL_TREE, 8861 GET_MODE_BITSIZE (GET_MODE (exp)) - prec); 8862 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); 8863 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); 8864 } 8865} 8866 8867/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that 8868 when applied to the address of EXP produces an address known to be 8869 aligned to more than BIGGEST_ALIGNMENT. */ 8870 8871static int 8872is_aligning_offset (tree offset, tree exp) 8873{ 8874 /* Strip off any conversions. */ 8875 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8876 || TREE_CODE (offset) == NOP_EXPR 8877 || TREE_CODE (offset) == CONVERT_EXPR) 8878 offset = TREE_OPERAND (offset, 0); 8879 8880 /* We must now have a BIT_AND_EXPR with a constant that is one less than 8881 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */ 8882 if (TREE_CODE (offset) != BIT_AND_EXPR 8883 || !host_integerp (TREE_OPERAND (offset, 1), 1) 8884 || compare_tree_int (TREE_OPERAND (offset, 1), 8885 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 8886 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0) 8887 return 0; 8888 8889 /* Look at the first operand of BIT_AND_EXPR and strip any conversion. 8890 It must be NEGATE_EXPR. Then strip any more conversions. */ 8891 offset = TREE_OPERAND (offset, 0); 8892 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8893 || TREE_CODE (offset) == NOP_EXPR 8894 || TREE_CODE (offset) == CONVERT_EXPR) 8895 offset = TREE_OPERAND (offset, 0); 8896 8897 if (TREE_CODE (offset) != NEGATE_EXPR) 8898 return 0; 8899 8900 offset = TREE_OPERAND (offset, 0); 8901 while (TREE_CODE (offset) == NON_LVALUE_EXPR 8902 || TREE_CODE (offset) == NOP_EXPR 8903 || TREE_CODE (offset) == CONVERT_EXPR) 8904 offset = TREE_OPERAND (offset, 0); 8905 8906 /* This must now be the address of EXP. 
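   (Editor's sketch of the shape being matched; hypothetical code: an
   offset of the form (-&object) & mask, as produced by manual
   over-alignment such as

       char raw[SIZE + 64];
       char *p = raw + ((- (unsigned long) raw) & 63);  // align to 64 bytes

   assuming 64 bytes exceeds the target's BIGGEST_ALIGNMENT; the NEGATE
   of the ADDR_EXPR confirmed below is what proves the extra alignment.)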
*/ 8907 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; 8908} 8909 8910/* Return the tree node if an ARG corresponds to a string constant or zero 8911 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset 8912 in bytes within the string that ARG is accessing. The type of the 8913 offset will be `sizetype'. */ 8914 8915tree 8916string_constant (tree arg, tree *ptr_offset) 8917{ 8918 tree array, offset; 8919 STRIP_NOPS (arg); 8920 8921 if (TREE_CODE (arg) == ADDR_EXPR) 8922 { 8923 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) 8924 { 8925 *ptr_offset = size_zero_node; 8926 return TREE_OPERAND (arg, 0); 8927 } 8928 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL) 8929 { 8930 array = TREE_OPERAND (arg, 0); 8931 offset = size_zero_node; 8932 } 8933 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) 8934 { 8935 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); 8936 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); 8937 if (TREE_CODE (array) != STRING_CST 8938 && TREE_CODE (array) != VAR_DECL) 8939 return 0; 8940 } 8941 else 8942 return 0; 8943 } 8944 else if (TREE_CODE (arg) == PLUS_EXPR) 8945 { 8946 tree arg0 = TREE_OPERAND (arg, 0); 8947 tree arg1 = TREE_OPERAND (arg, 1); 8948 8949 STRIP_NOPS (arg0); 8950 STRIP_NOPS (arg1); 8951 8952 if (TREE_CODE (arg0) == ADDR_EXPR 8953 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST 8954 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL)) 8955 { 8956 array = TREE_OPERAND (arg0, 0); 8957 offset = arg1; 8958 } 8959 else if (TREE_CODE (arg1) == ADDR_EXPR 8960 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST 8961 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL)) 8962 { 8963 array = TREE_OPERAND (arg1, 0); 8964 offset = arg0; 8965 } 8966 else 8967 return 0; 8968 } 8969 else 8970 return 0; 8971 8972 if (TREE_CODE (array) == STRING_CST) 8973 { 8974 *ptr_offset = fold_convert (sizetype, offset); 8975 return array; 8976 } 8977 else if (TREE_CODE (array) == VAR_DECL) 8978 { 8979 int length; 8980 8981 /* Variables initialized to string literals can be handled too. */ 8982 if (DECL_INITIAL (array) == NULL_TREE 8983 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST) 8984 return 0; 8985 8986 /* If they are read-only, non-volatile and bind locally. */ 8987 if (! TREE_READONLY (array) 8988 || TREE_SIDE_EFFECTS (array) 8989 || ! targetm.binds_local_p (array)) 8990 return 0; 8991 8992 /* Avoid const char foo[4] = "abcde"; */ 8993 if (DECL_SIZE_UNIT (array) == NULL_TREE 8994 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST 8995 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0 8996 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0) 8997 return 0; 8998 8999 /* If variable is bigger than the string literal, OFFSET must be constant 9000 and inside of the bounds of the string literal. */ 9001 offset = fold_convert (sizetype, offset); 9002 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0 9003 && (! host_integerp (offset, 1) 9004 || compare_tree_int (offset, length) >= 0)) 9005 return 0; 9006 9007 *ptr_offset = offset; 9008 return DECL_INITIAL (array); 9009 } 9010 9011 return 0; 9012} 9013 9014/* Generate code to calculate EXP using a store-flag instruction 9015 and return an rtx for the result. EXP is either a comparison 9016 or a TRUTH_NOT_EXPR whose operand is a comparison. 9017 9018 If TARGET is nonzero, store the result there if convenient. 9019 9020 If ONLY_CHEAP is nonzero, only do this if it is likely to be very 9021 cheap. 
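   (Editor's aside: the cheap case is a comparison a single
   set-on-condition instruction can compute, e.g. the hypothetical

       int lt (int a, int b) { return a < b; }

   which many targets do with one compare plus one scc insn; with
   ONLY_CHEAP nonzero the multi-insn set/jump/set fallback described
   below is refused.)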

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
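
/* As a concrete picture of the two paths above: a single-bit test such
   as

       (x & 4) != 0

   is folded to roughly (x >> 2) & 1 and expanded with a shift and an
   AND, with no scc insn needed, while the set/jump/set fallback for a
   comparison like x < y amounts to

       target = 1;  if (x < y) goto label;  target = 0;  label:

   with the two constants interchanged when INVERT is set.  (Sketch
   only; the RTL actually emitted depends on the target's setcc and
   branch patterns.)  */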

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_size (index_bits, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
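
/* For example, when expanding a switch on a 64-bit index for a casesi
   pattern whose index operand is SImode, the code above subtracts
   MINVAL and performs the range check while still in DImode, then
   truncates to SImode; the truncation is safe only because an in-range
   value is already known to fit.  (Illustrative modes; the operand
   modes actually used come from the target's casesi insn.)  */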

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
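
/* Concretely, the dispatch address formed above is

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   so with 4-byte table entries an index of 3 loads the fourth entry.
   The single GTU test suffices for both bounds because INDEX already
   has the low bound subtracted: an original value below the low bound
   wraps to a large unsigned number and so also compares above RANGE.
   (The entry size is illustrative; CASE_VECTOR_MODE is
   target-defined.)  */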

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
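
/* For example, on a 32-bit target with no vector unit, V2SImode is
   still reported valid here provided SImode itself is supported: the
   expanders can fall back to operating on the two SImode halves
   separately.  (Illustrative; the actual answer depends on the
   target's vector_mode_supported_p and scalar_mode_supported_p
   hooks.)  */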

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"