/* Default target hook functions.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
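/* For illustration only, a sketch of what step 5 above looks like for a
   hypothetical backend "foo" (the target name, file and function below
   are made up): instead of defining a target macro, config/foo/foo.c
   overrides the hook before building its target vector:

       static bool
       foo_legitimate_address_p (machine_mode mode, rtx x, bool strict)
       {
         ... target-specific address legality checks ...
       }

       #undef TARGET_LEGITIMATE_ADDRESS_P
       #define TARGET_LEGITIMATE_ADDRESS_P foo_legitimate_address_p

       struct gcc_target targetm = TARGET_INITIALIZER;

   The functions in this file only serve as the fallbacks used when a
   target does not override the corresponding hook.  */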
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "optabs.h"
#include "regs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "flags.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "common/common-target.h"
#include "reload.h"
#include "intl.h"
#include "opts.h"
#include "gimplify.h"
#include "predict.h"
#include "real.h"
#include "langhooks.h"
#include "sbitmap.h"
#include "function-abi.h"

bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}

int
default_bitfield_may_trap_p (const_rtx x, unsigned flags)
{
  return 0;
}

machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                               machine_mode mode,
                               int *punsignedp ATTRIBUTE_UNUSED,
                               const_tree funtype ATTRIBUTE_UNUSED,
                               int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

machine_mode
default_promote_function_mode_always_promote (const_tree type,
                                              machine_mode mode,
                                              int *punsignedp,
                                              const_tree funtype ATTRIBUTE_UNUSED,
                                              int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
                          const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
                                         machine_mode)
{
  return false;
}

bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t,
                                const function_arg_info &, int *, int)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */
int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
                                           const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
{
  return arg.named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
                               machine_mode /*mode*/,
                               rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}

/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}

/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */
machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case E_SFmode:
#endif
#ifdef HAVE_DFmode
    case E_DFmode:
#endif
#ifdef HAVE_XFmode
    case E_XFmode:
#endif
#ifdef HAVE_TFmode
    case E_TFmode:
#endif
      return true;

    default:
      return false;
    }
}

/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  */
opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
	{
	case 32:
#ifdef HAVE_DFmode
	  cand1 = DFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_XFmode
	  cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
	  cand2 = TFmode;
#endif
	  break;

	case 128:
	  break;

	default:
	  /* Those are the only valid _FloatNx types.  */
	  gcc_unreachable ();
	}
      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
	{
	case 16:
	  /* Always enable _Float16 if we have basic support for the mode.
	     Targets can control the range and precision of operations on
	     the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
	  cand = HFmode;
#endif
	  break;

	case 32:
#ifdef HAVE_SFmode
	  cand = SFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_DFmode
	  cand = DFmode;
#endif
	  break;

	case 128:
#ifdef HAVE_TFmode
	  cand = TFmode;
#endif
	  break;

	default:
	  break;
	}
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }
  return opt_scalar_float_mode ();
}

/* Define this to return true if the _Floatn and _Floatnx built-in functions
   should implicitly enable the built-in function without the __builtin_ prefix
   in addition to the normal built-in function with the __builtin_ prefix.  The
   default is to only enable built-in functions without the __builtin_ prefix
   for the GNU C language.  The argument FUNC is the enum built_in_function
   id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}

/* Make some target macros usable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}

/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */
const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}

/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype,
                                    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}

/* Reciprocal.  */

tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
                                          const function_arg_info &)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
                                         const function_arg_info &)
{
  return true;
}

int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
                                     const function_arg_info &)
{
  return 0;
}

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
                                tree ATTRIBUTE_UNUSED)
{
}

void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

/* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}

/* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
   upward, but pad short args downward on big-endian machines.  */

pad_direction
default_function_arg_padding (machine_mode mode, const_tree type)
{
  if (!BYTES_BIG_ENDIAN)
    return PAD_UPWARD;

  unsigned HOST_WIDE_INT size;
  if (mode == BLKmode)
    {
      if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return PAD_UPWARD;
      size = int_size_in_bytes (type);
    }
  else
    /* Targets with variable-sized modes must override this hook
       and handle variable-sized modes explicitly.  */
    size = GET_MODE_SIZE (mode).to_constant ();

  if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    return PAD_DOWNWARD;

  return PAD_UPWARD;
}

rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
                               const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
                                     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
#if 1
      /*
       * This is a hack:
       * It appears that our gas does not generate @PLT for hidden
       * symbols.  It could be that we need a newer version, or that
       * this local function is handled differently on linux.
       */
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
#else
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
#endif

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
                                   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
                        const_tree fn_decl_or_type,
                        bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
                       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
                         rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
                                         reg_class_t cl,
                                         reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}

extern bool
default_lra_p (void)
{
  return true;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                          reg_class_t reload_class_i ATTRIBUTE_UNUSED,
                          machine_mode reload_mode ATTRIBUTE_UNUSED,
                          secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
					   MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
					    MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

	}
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}

/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}

/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}

/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}

/* The default implementation of TARGET_CONSTANT_ALIGNMENT.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}

/* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
   to at least BITS_PER_WORD but otherwise makes no changes.  */

HOST_WIDE_INT
constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
{
  if (TREE_CODE (exp) == STRING_CST)
    return MAX (align, BITS_PER_WORD);
  return align;
}

/* Default to natural alignment for vector types, bounded by
   MAX_OFILE_ALIGNMENT.  */

HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
  tree size = TYPE_SIZE (type);
  if (tree_fits_uhwi_p (size))
    align = tree_to_uhwi (size);

  return align < MAX_OFILE_ALIGNMENT ? align : MAX_OFILE_ALIGNMENT;
}

/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}

/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}

/* By default, assume that a target supports any factor of misalignment
   memory access if it supports the movmisalign pattern.
   IS_PACKED is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
                                             const_tree type ATTRIBUTE_UNUSED,
                                             int misalignment ATTRIBUTE_UNUSED,
                                             bool is_packed ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}

/* By default do not split reductions further.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}

/* By default only the preferred vector mode is tried.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}

/* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
                                scalar_mode element_mode,
                                poly_uint64 nunits)
{
  machine_mode result_mode;
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}

/* By default a vector of integers is used as a mask.  */

opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  return related_int_vector_mode (mode);
}

/* By default consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

void *
default_init_cost (class loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}

/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
                       class _stmt_vec_info *stmt_info, int misalign,
                       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
								misalign);
  /* Statements in an inner loop relative to the loop being
     vectorized are weighted more heavily.  The value here is
     arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}

/* By default, the cost model just returns the accumulated costs.  */
void
default_finish_cost (void *data, unsigned *prologue_cost,
                     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}

/* Free the cost data.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}

/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (scalar_int_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (DECL_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}

/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}

/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
                                       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}

/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */
bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
                                         bool strict,
                                         addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
                                       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for determining if 0 within a named address
   space is a valid address.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  return as;
}

/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}


/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
                            tree from_type ATTRIBUTE_UNUSED,
                            tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_HARD_REGNO_NREGS.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
                                  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
                                         tree ARG_UNUSED (name),
                                         tree ARG_UNUSED (args),
                                         int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
                                    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}

bool
default_target_can_inline_p (tree caller, tree callee)
{
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
  if (! callee_opts)
    callee_opts = target_option_default_node;
  if (! caller_opts)
    caller_opts = target_option_default_node;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  return callee_opts == caller_opts;
}

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

/* By default, assume that libc does not have a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                          reg_class_t rclass ATTRIBUTE_UNUSED,
                          bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                            reg_class_t from ATTRIBUTE_UNUSED,
                            reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}

/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}

/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x)
{
  return x.coeffs[0];
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}

/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
                                        unsigned int alignment,
                                        enum by_pieces_operation op,
                                        bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}

/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  return 1;
}

/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */

void
default_print_patchable_function_entry (FILE *file,
                                        unsigned HOST_WIDE_INT patch_area_size,
                                        bool record_p)
{
  const char *nop_templ = 0;
  int code_num;
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      static int patch_area_number;
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      patch_area_number++;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", patch_area_number);

      switch_to_section (get_section ("__patchable_function_entries",
				      SECTION_WRITE | SECTION_RELRO, NULL));
      assemble_align (POINTER_SIZE);
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}

/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */
reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
                                reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}

/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
                                       reg_class_t rclass)
{
  return rclass;
}

/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}

/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}

/* The default implementation of TARGET_CLASS_MAX_NREGS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
                         machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
					  MACRO_MODE (mode));
#else
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
#endif
}

/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  return UI_NONE;
}

/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   must define this hook.  */

unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  gcc_unreachable ();
}

/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}

/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized.  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}

/* Return true if a leaf function should stay leaf even with profiling
   enabled.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}

/* Return true if the state of option OPTION should be stored in PCH files
   and checked by default_pch_valid_p.  Store the option's current state
   in STATE if so.  */
static inline bool
option_affects_pch_p (int option, struct cl_option_state *state)
{
  if ((cl_options[option].flags & CL_TARGET) == 0)
    return false;
  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
    return false;
  if (option_flag_var (option, &global_options) == &target_flags)
    if (targetm.check_pch_target_flags)
      return false;
  return get_option_state (&global_options, option, state);
}

/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
	len -= state.size;
      }

  return NULL;
}

/* Default version of cstore_mode.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}

/* Default version of member_type_forces_blk.  */
bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
                             rtx ptr ATTRIBUTE_UNUSED,
                             rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              rtx bounds ATTRIBUTE_UNUSED,
                              rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
                               rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
                               valist_tmp,
                               build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                           rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
                             unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

/* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}

/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  if (predictable_p)
    {
      if (global_options_set.x_param_max_rtl_if_conversion_predictable_cost)
        return param_max_rtl_if_conversion_predictable_cost;
    }
  else
    {
      if (global_options_set.x_param_max_rtl_if_conversion_unpredictable_cost)
        return param_max_rtl_if_conversion_unpredictable_cost;
    }

  return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
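
/* For example, on a target where BRANCH_COST (true, false) is 2 and the
   params above are left unset, the cap for an unpredictable edge is
   2 * COSTS_N_INSNS (3) = 24 cost units, since COSTS_N_INSNS scales by the
   cost of one simple instruction (4).  */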

/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}

/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}

/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}

/* The default implementation of TARGET_SELECT_EARLY_REMAT_MODES.  */

void
default_select_early_remat_modes (sbitmap)
{
}

/* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}

/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}

/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  return !active;
}

/* Default implementation of the speculation-safe-value hook
   TARGET_SPECULATION_SAFE_VALUE.  This implementation simply copies VAL to
   RESULT and generates a speculation_barrier insn, if such a pattern is
   defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx result, rtx val,
                                rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}

#include "gt-targhooks.h"
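
/* Illustrative sketch (hypothetical, not part of this file): a port whose
   hardware never executes speculatively can satisfy
   __builtin_speculation_safe_value without emitting a barrier by pointing
   the hook at speculation_safe_value_not_needed in its backend sources:  */
#if 0
#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
#endif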