/* Target machine subroutines for Altera Nios II.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Jonah Graham (jgraham@altera.com),
   Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
   Contributed by Mentor Graphics, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "hashtab.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "ggc.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "target.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
#include "df.h"
#include "debug.h"
#include "reload.h"
#include "stor-layout.h"
#include "builtins.h"

/* Forward function declarations.  */
static bool prologue_saved_reg_p (unsigned);
static void nios2_load_pic_register (void);
static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
static const char *nios2_unspec_reloc_name (int);
static void nios2_register_builtin_fndecl (unsigned, tree);

/* Threshold for data being put into the small data/bss area, instead
   of the normal data area (references to the small data/bss area take
   1 instruction, and use the global pointer, references to the normal
   data area takes 2 instructions).  */
unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;

/* Per-function machine-dependent state, allocated by
   nios2_init_machine_status and garbage-collected (GTY).  */
struct GTY (()) machine_function
{
  /* Current frame information, to be filled in by nios2_compute_frame_layout
     with register save masks, and offsets for the current function.  */

  /* Mask of registers to save; bit N corresponds to GP register N.  */
  unsigned int save_mask;
  /* Number of bytes that the entire frame takes up.  */
  int total_size;
  /* Number of bytes that variables take up.  */
  int var_size;
  /* Number of bytes that outgoing arguments take up.  */
  int args_size;
  /* Number of bytes needed to store registers in frame.  */
  int save_reg_size;
  /* Offset from new stack pointer to store registers.  */
  int save_regs_offset;
  /* Offset from save_regs_offset to store frame pointer register.  */
  int fp_save_offset;
  /* != 0 if frame layout already calculated.  */
  int initialized;
};

/* State to track the assignment of custom codes to FPU/custom builtins.
   Indexed by the custom instruction code (0-255).  */
static enum nios2_ccs_code custom_code_status[256];
static int custom_code_index[256];
/* Set to true if any conflicts (re-use of a code between 0-255) are found.  */
static bool custom_code_conflict = false;


/* Definition of builtin function types for nios2.

   Each N2_FTYPE(N, (R, A1, ...)) entry describes a signature with N
   operands: return type R followed by the argument types.  The list is
   expanded below both into the nios2_ftcode enumeration and into the
   switch in nios2_ftype.  */

#define N2_FTYPES \
  N2_FTYPE(1, (SF)) \
  N2_FTYPE(1, (VOID)) \
  N2_FTYPE(2, (DF, DF)) \
  N2_FTYPE(3, (DF, DF, DF)) \
  N2_FTYPE(2, (DF, SF)) \
  N2_FTYPE(2, (DF, SI)) \
  N2_FTYPE(2, (DF, UI)) \
  N2_FTYPE(2, (SF, DF)) \
  N2_FTYPE(2, (SF, SF)) \
  N2_FTYPE(3, (SF, SF, SF)) \
  N2_FTYPE(2, (SF, SI)) \
  N2_FTYPE(2, (SF, UI)) \
  N2_FTYPE(2, (SI, CVPTR)) \
  N2_FTYPE(2, (SI, DF)) \
  N2_FTYPE(3, (SI, DF, DF)) \
  N2_FTYPE(2, (SI, SF)) \
  N2_FTYPE(3, (SI, SF, SF)) \
  N2_FTYPE(2, (SI, SI)) \
  N2_FTYPE(2, (UI, CVPTR)) \
  N2_FTYPE(2, (UI, DF)) \
  N2_FTYPE(2, (UI, SF)) \
  N2_FTYPE(2, (VOID, DF)) \
  N2_FTYPE(2, (VOID, SF)) \
  N2_FTYPE(3, (VOID, SI, SI)) \
  N2_FTYPE(3, (VOID, VPTR, SI))

/* Helpers to paste an entry into an enumerator name such as
   N2_FTYPE_SI_SF.  */
#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2

/* Expand ftcode enumeration.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
N2_FTYPES
#undef N2_FTYPE
N2_FTYPE_MAX
};

/* Return the tree function type, based on the ftcode.  Results are
   built lazily and cached in a function-static array.  */
static tree
nios2_ftype (enum nios2_ftcode ftcode)
{
  static tree types[(int) N2_FTYPE_MAX];

  /* Local aliases so the N2_FTYPE_ARGS* macros below can paste the
     short type tags into variable names.  */
  tree N2_TYPE_SF = float_type_node;
  tree N2_TYPE_DF = double_type_node;
  tree N2_TYPE_SI = integer_type_node;
  tree N2_TYPE_UI = unsigned_type_node;
  tree N2_TYPE_VOID = void_type_node;

  static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
  if (!N2_TYPE_CVPTR)
    {
      /* const volatile void *.  */
      N2_TYPE_CVPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    (TYPE_QUAL_CONST
						     | TYPE_QUAL_VOLATILE)));
      /* volatile void *.  */
      N2_TYPE_VPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    TYPE_QUAL_VOLATILE));
    }
  if (types[(int) ftcode] == NULL_TREE)
    switch (ftcode)
      {
#define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
#define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
#define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
#define N2_FTYPE(N,ARGS)						\
  case N2_FTYPE_OP ## N ARGS:						\
    types[(int) ftcode]							\
      = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE);	\
    break;
	N2_FTYPES
#undef N2_FTYPE
      default: gcc_unreachable ();
      }
  return types[(int) ftcode];
}


/* Definition of FPU instruction descriptions.  */

struct nios2_fpu_insn_info
{
  /* Printable instruction name, e.g. "fadds".  */
  const char *name;
  /* Operand count, and pointer to the -mcustom-<insn>= option variable
     (nios2_custom_<insn>) holding the assigned custom code or -1.  */
  int num_operands, *optvar;
  /* Option indexes for the -mcustom-<insn>= / -mno-custom-<insn>
     command-line switches.  */
  int opt, no_opt;
#define N2F_DF 0x1
#define N2F_DFREQ 0x2
#define N2F_UNSAFE 0x4
#define N2F_FINITE 0x8
#define N2F_NO_ERRNO 0x10
  /* Bitmask of the N2F_* flags above.  */
  unsigned int flags;
  enum insn_code icode;
  enum nios2_ftcode ftcode;
};

/* Base macro for defining FPU instructions.  */
#define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args)	\
  { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_,	\
    OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode,		\
    N2_FTYPE_OP ## nop args }
/* Arithmetic and math functions; 2 or 3 operand FP operations.  */
#define N2FPU_OP2(mode) (mode, mode)
#define N2FPU_OP3(mode) (mode, mode, mode)
#define N2FPU_INSN_DEF(code, icode, nop, flags, m, M)		\
  N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags,		\
		       icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
#define N2FPU_INSN_SF(code, nop, flags)				\
  N2FPU_INSN_DEF (code, code, nop, flags, s, S)
#define N2FPU_INSN_DF(code, nop, flags)				\
  N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)

/* Compare instructions, 3 operand FP operation with a SI result.  */
#define N2FPU_CMP_DEF(code, flags, m, M)			\
  N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags,		\
		       nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
#define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
#define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)

/* The order of definition needs to be maintained consistent with
   enum n2fpu_code in nios2-opts.h.  */
struct nios2_fpu_insn_info nios2_fpu_insn[] =
  {
    /* Single precision instructions.  */
    N2FPU_INSN_SF (add, 3, 0),
    N2FPU_INSN_SF (sub, 3, 0),
    N2FPU_INSN_SF (mul, 3, 0),
    N2FPU_INSN_SF (div, 3, 0),
    /* Due to textual difference between min/max and smin/smax.  */
    N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
    N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
    N2FPU_INSN_SF (neg, 2, 0),
    N2FPU_INSN_SF (abs, 2, 0),
    N2FPU_INSN_SF (sqrt, 2, 0),
    N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
    /* Single precision compares.  */
    N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
    N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
    N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),

    /* Double precision instructions.  */
    N2FPU_INSN_DF (add, 3, 0),
    N2FPU_INSN_DF (sub, 3, 0),
    N2FPU_INSN_DF (mul, 3, 0),
    N2FPU_INSN_DF (div, 3, 0),
    /* Due to textual difference between min/max and smin/smax.  */
    N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
    N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
    N2FPU_INSN_DF (neg, 2, 0),
    N2FPU_INSN_DF (abs, 2, 0),
    N2FPU_INSN_DF (sqrt, 2, 0),
    N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
    /* Double precision compares.  */
    N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
    N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
    N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),

    /* Conversion instructions.  */
    N2FPU_INSN_DEF_BASE (floatis,  2, 0, floatsisf2,    (SF, SI)),
    N2FPU_INSN_DEF_BASE (floatus,  2, 0, floatunssisf2, (SF, UI)),
    N2FPU_INSN_DEF_BASE (floatid,  2, 0, floatsidf2,    (DF, SI)),
    N2FPU_INSN_DEF_BASE (floatud,  2, 0, floatunssidf2, (DF, UI)),
    N2FPU_INSN_DEF_BASE (round,    2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
    N2FPU_INSN_DEF_BASE (fixsi,    2, 0, fix_truncsfsi2,     (SI, SF)),
    N2FPU_INSN_DEF_BASE (fixsu,    2, 0, fixuns_truncsfsi2,  (UI, SF)),
    N2FPU_INSN_DEF_BASE (fixdi,    2, 0, fix_truncdfsi2,     (SI, DF)),
    N2FPU_INSN_DEF_BASE (fixdu,    2, 0, fixuns_truncdfsi2,  (UI, DF)),
    N2FPU_INSN_DEF_BASE (fextsd,   2, 0, extendsfdf2,        (DF, SF)),
    N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2,         (SF, DF)),

    /* X, Y access instructions.  */
    N2FPU_INSN_DEF_BASE (fwrx,     2, N2F_DFREQ, nios2_fwrx,   (VOID, DF)),
    N2FPU_INSN_DEF_BASE (fwry,     2, N2F_DFREQ, nios2_fwry,   (VOID, SF)),
    N2FPU_INSN_DEF_BASE (frdxlo,   1, N2F_DFREQ, nios2_frdxlo, (SF)),
    N2FPU_INSN_DEF_BASE (frdxhi,   1, N2F_DFREQ, nios2_frdxhi, (SF)),
    N2FPU_INSN_DEF_BASE (frdy,     1, N2F_DFREQ, nios2_frdy,   (SF))
  };

/* Some macros for ease of access.  N2FPU_N is the custom code assigned
   to the insn (-1 when the insn is not enabled).  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)

/* Export the FPU insn enabled predicate to nios2.md.  */
bool
nios2_fpu_insn_enabled (enum n2fpu_code code)
{
  return N2FPU_ENABLED_P (code);
}
/* Return true if COND comparison for mode MODE is enabled under current
   settings.  */

static bool
nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
{
  if (mode == SFmode)
    switch (cond)
      {
      case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
      case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
      case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
      case GE: return N2FPU_OP_ENABLED_P (fcmpges);
      case LT: return N2FPU_OP_ENABLED_P (fcmplts);
      case LE: return N2FPU_OP_ENABLED_P (fcmples);
      default: break;
      }
  else if (mode == DFmode)
    switch (cond)
      {
      case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
      case NE: return N2FPU_OP_ENABLED_P (fcmpned);
      case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
      case GE: return N2FPU_OP_ENABLED_P (fcmpged);
      case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
      case LE: return N2FPU_OP_ENABLED_P (fcmpled);
      default: break;
      }
  return false;
}

/* Stack layout and calling conventions.  */

/* Round LOC up to the preferred stack boundary, in bytes.  */
#define NIOS2_STACK_ALIGN(LOC)						\
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))		\
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))

/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  Fills in cfun->machine on first call; subsequent calls
   return the cached total_size once the layout is initialized.  */
static int
nios2_compute_frame_layout (void)
{
  unsigned int regno;
  unsigned int save_mask = 0;
  int total_size;
  int var_size;
  int out_args_size;
  int save_reg_size;

  if (cfun->machine->initialized)
    return cfun->machine->total_size;

  var_size = NIOS2_STACK_ALIGN (get_frame_size ());
  out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
  total_size = var_size + out_args_size;

  /* Calculate space needed for gp registers.  */
  save_reg_size = 0;
  for (regno = 0; regno <= LAST_GP_REG; regno++)
    if (prologue_saved_reg_p (regno))
      {
	save_mask |= 1 << regno;
	save_reg_size += 4;
      }

  /* If we call eh_return, we need to save the EH data registers.  */
  if (crtl->calls_eh_return)
    {
      unsigned i;
      unsigned r;

      for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
	if (!(save_mask & (1 << r)))
	  {
	    save_mask |= 1 << r;
	    save_reg_size += 4;
	  }
    }

  /* The FP save slot sits after all lower-numbered saved registers.  */
  cfun->machine->fp_save_offset = 0;
  if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
    {
      int fp_save_offset = 0;
      for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
	if (save_mask & (1 << regno))
	  fp_save_offset += 4;

      cfun->machine->fp_save_offset = fp_save_offset;
    }

  save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
  total_size += save_reg_size;
  total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);

  /* Save other computed information.  */
  cfun->machine->save_mask = save_mask;
  cfun->machine->total_size = total_size;
  cfun->machine->var_size = var_size;
  cfun->machine->args_size = out_args_size;
  cfun->machine->save_reg_size = save_reg_size;
  /* Only treat the layout as final once reload has completed, since
     register usage can still change before then.  */
  cfun->machine->initialized = reload_completed;
  cfun->machine->save_regs_offset = out_args_size + var_size;

  return total_size;
}

/* Generate save/restore of register REGNO at SP + OFFSET.  Used by the
   prologue/epilogue expand routines.  */
static void
save_reg (int regno, unsigned offset)
{
  rtx reg = gen_rtx_REG (SImode, regno);
  rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			   gen_int_mode (offset, Pmode));
  rtx insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
  RTX_FRAME_RELATED_P (insn) = 1;
}

static void
restore_reg (int regno, unsigned offset)
{
  rtx reg = gen_rtx_REG (SImode, regno);
  rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			   gen_int_mode (offset, Pmode));
  rtx insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
  /* Tag epilogue unwind note.  */
  add_reg_note (insn, REG_CFA_RESTORE, reg);
  RTX_FRAME_RELATED_P (insn) = 1;
}

/* Emit conditional trap for checking stack limit.  Only a register-valued
   stack_limit_rtx is supported; a symbolic limit is rejected.  */
static void
nios2_emit_stack_limit_check (void)
{
  if (REG_P (stack_limit_rtx))
    emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx,
					  stack_limit_rtx),
			     stack_pointer_rtx, stack_limit_rtx, GEN_INT (3)));
  else
    sorry ("only register based stack limit is supported");
}

/* Temp regno used inside prologue/epilogue.  */
#define TEMP_REG_NUM 8

/* Add constant IMMED to REG, going through TEMP_REG_NUM when IMMED does
   not fit in a 16-bit immediate.  Returns the addition insn emitted.  */
static rtx
nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
{
  rtx insn;
  if (SMALL_INT (immed))
    insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
      emit_move_insn (tmp, gen_int_mode (immed, Pmode));
      insn = emit_insn (gen_add2_insn (reg, tmp));
    }
  return insn;
}

/* Expand the function prologue: allocate the frame, save the registers
   in cfun->machine->save_mask, set up the frame pointer and PIC register
   when needed, and emit stack-limit checks if -fstack-limit is on.  */
void
nios2_expand_prologue (void)
{
  unsigned int regno;
  int total_frame_size, save_offset;
  int sp_offset;      /* offset from base_reg to final stack value.  */
  int save_regs_base; /* offset from base_reg to register save area.  */
  rtx insn;

  total_frame_size = nios2_compute_frame_layout ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_frame_size;

  /* Decrement the stack pointer.  */
  if (!SMALL_INT (total_frame_size))
    {
      /* We need an intermediary point, this will point at the spill block.  */
      insn = emit_insn
	(gen_add2_insn (stack_pointer_rtx,
			gen_int_mode (cfun->machine->save_regs_offset
				      - total_frame_size, Pmode)));
      RTX_FRAME_RELATED_P (insn) = 1;
      save_regs_base = 0;
      sp_offset = -cfun->machine->save_regs_offset;
    }
  else if (total_frame_size)
    {
      insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
				       gen_int_mode (-total_frame_size,
						     Pmode)));
      RTX_FRAME_RELATED_P (insn) = 1;
      save_regs_base = cfun->machine->save_regs_offset;
      sp_offset = 0;
    }
  else
    save_regs_base = sp_offset = 0;

  if (crtl->limit_stack)
    nios2_emit_stack_limit_check ();

  /* Store registers top-down from the end of the save area.  */
  save_offset = save_regs_base + cfun->machine->save_reg_size;

  for (regno = LAST_GP_REG; regno > 0; regno--)
    if (cfun->machine->save_mask & (1 << regno))
      {
	save_offset -= 4;
	save_reg (regno, save_offset);
      }

  if (frame_pointer_needed)
    {
      int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
      insn = emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
				       stack_pointer_rtx,
				       gen_int_mode (fp_save_offset, Pmode)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Second SP adjustment, used when the frame was allocated in two steps
     (the !SMALL_INT case above).  */
  if (sp_offset)
    {
      rtx sp_adjust
	= gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		       plus_constant (Pmode, stack_pointer_rtx, sp_offset));
      if (SMALL_INT (sp_offset))
	insn = emit_insn (sp_adjust);
      else
	{
	  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
	  emit_move_insn (tmp, gen_int_mode (sp_offset, Pmode));
	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
	  /* Attach the sp_adjust as a note indicating what happened.  */
	  add_reg_note (insn, REG_FRAME_RELATED_EXPR, sp_adjust);
	}
      RTX_FRAME_RELATED_P (insn) = 1;

      if (crtl->limit_stack)
	nios2_emit_stack_limit_check ();
    }

  /* Load the PIC register if needed.  */
  if (crtl->uses_pic_offset_table)
    nios2_load_pic_register ();

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());
}

/* Expand the function epilogue: restore saved registers, deallocate the
   frame, and emit the return unless SIBCALL_P (the caller will emit a
   sibling call instead).  */
void
nios2_expand_epilogue (bool sibcall_p)
{
  rtx insn, cfa_adj;
  int total_frame_size;
  int sp_adjust, save_offset;
  unsigned int regno;

  if (!sibcall_p && nios2_can_use_return_insn ())
    {
      emit_jump_insn (gen_return ());
      return;
    }

  /* Block scheduling from moving insns across the epilogue boundary.  */
  emit_insn (gen_blockage ());

  total_frame_size = nios2_compute_frame_layout ();
  if (frame_pointer_needed)
    {
      /* Recover the stack pointer.  */
      insn = emit_insn (gen_add3_insn
			(stack_pointer_rtx, hard_frame_pointer_rtx,
			 gen_int_mode (-cfun->machine->fp_save_offset, Pmode)));
      cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
			       (total_frame_size
				- cfun->machine->save_regs_offset));
      add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
      RTX_FRAME_RELATED_P (insn) = 1;

      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else if (!SMALL_INT (total_frame_size))
    {
      rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
      emit_move_insn (tmp, gen_int_mode (cfun->machine->save_regs_offset,
					 Pmode));
      insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
      cfa_adj = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			     plus_constant (Pmode, stack_pointer_rtx,
					    cfun->machine->save_regs_offset));
      add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa_adj);
      RTX_FRAME_RELATED_P (insn) = 1;
      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else
    {
      save_offset = cfun->machine->save_regs_offset;
      sp_adjust = total_frame_size;
    }

  save_offset += cfun->machine->save_reg_size;

  for (regno = LAST_GP_REG; regno > 0; regno--)
    if (cfun->machine->save_mask & (1 << regno))
      {
	save_offset -= 4;
	restore_reg (regno, save_offset);
      }

  if (sp_adjust)
    {
      insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
				       gen_int_mode (sp_adjust, Pmode)));
      cfa_adj = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			     plus_constant (Pmode, stack_pointer_rtx,
					    sp_adjust));
      add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa_adj);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Add in the __builtin_eh_return stack adjustment.  */
  if (crtl->calls_eh_return)
    emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));

  if (!sibcall_p)
    emit_jump_insn (gen_simple_return ());
}

/* Implement RETURN_ADDR_RTX.  Note, we do not support moving
   back to a previous frame.  */
rtx
nios2_get_return_address (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, RA_REGNO);
}

/* Emit code to change the current function's return address to
   ADDRESS.  SCRATCH is available as a scratch register, if needed.
   ADDRESS and SCRATCH are both word-mode GPRs.  */
void
nios2_set_return_address (rtx address, rtx scratch)
{
  nios2_compute_frame_layout ();
  if (cfun->machine->save_mask & (1 << RA_REGNO))
    {
      /* RA is the highest-numbered saved register, so its slot is the
	 last one in the save area.  */
      unsigned offset = cfun->machine->save_reg_size - 4;
      rtx base;

      if (frame_pointer_needed)
	base = hard_frame_pointer_rtx;
      else
	{
	  base = stack_pointer_rtx;
	  offset += cfun->machine->save_regs_offset;

	  if (!SMALL_INT (offset))
	    {
	      emit_move_insn (scratch, gen_int_mode (offset, Pmode));
	      emit_insn (gen_add2_insn (scratch, base));
	      base = scratch;
	      offset = 0;
	    }
	}
      if (offset)
	base = plus_constant (Pmode, base, offset);
      emit_move_insn (gen_rtx_MEM (Pmode, base), address);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
}
/* Implement FUNCTION_PROFILER macro.  Emits the _mcount call sequence,
   preserving RA in r8 around it.  The PIC variants compute the GOT
   address from nextpc before looking up / calling _mcount.  */
void
nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  fprintf (file, "\tmov\tr8, ra\n");
  if (flag_pic == 1)
    {
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else if (flag_pic == 2)
    {
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
      fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
      fprintf (file, "\tadd\tr3, r2, r3\n");
      fprintf (file, "\tldw\tr2, 0(r3)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else
    fprintf (file, "\tcall\t_mcount\n");
  fprintf (file, "\tmov\tra, r8\n");
}
/* Dump stack layout.  Emits the computed frame layout as assembler
   comments, for debugging.  */
static void
nios2_dump_frame_layout (FILE *file)
{
  fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
  fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->total_size);
  fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->var_size);
  fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->args_size);
  fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->save_reg_size);
  fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
	   cfun->machine->initialized);
  fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
	   cfun->machine->save_regs_offset);
  fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
	   crtl->is_leaf);
  fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
	   frame_pointer_needed);
  fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
	   crtl->args.pretend_args_size);
}

/* Return true if REGNO should be saved in the prologue.  */
static bool
prologue_saved_reg_p (unsigned regno)
{
  gcc_assert (GP_REG_P (regno));

  /* Call-saved register that is actually used.  */
  if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
    return true;

  if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
    return true;

  if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
    return true;

  /* The return address register is saved whenever it is live.  */
  if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
    return true;

  return false;
}

/* Implement TARGET_CAN_ELIMINATE.  Elimination to the stack pointer is
   only possible when no frame pointer is needed.  */
static bool
nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  if (to == STACK_POINTER_REGNUM)
    return !frame_pointer_needed;
  return true;
}
/* Implement INITIAL_ELIMINATION_OFFSET macro.  Return the byte offset
   between register FROM and register TO at the start of the function.  */
int
nios2_initial_elimination_offset (int from, int to)
{
  int offset;

  nios2_compute_frame_layout ();

  /* Set OFFSET to the offset from the stack pointer.  */
  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      offset = cfun->machine->args_size;
      break;

    case ARG_POINTER_REGNUM:
      offset = cfun->machine->total_size;
      offset -= crtl->args.pretend_args_size;
      break;

    default:
      gcc_unreachable ();
    }

  /* If we are asked for the frame pointer offset, then adjust OFFSET
     by the offset from the frame pointer to the stack pointer.  */
  if (to == HARD_FRAME_POINTER_REGNUM)
    offset -= (cfun->machine->save_regs_offset
	       + cfun->machine->fp_save_offset);

  return offset;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
nios2_can_use_return_insn (void)
{
  if (!reload_completed || crtl->profile)
    return 0;

  return nios2_compute_frame_layout () == 0;
}


/* Check and signal some warnings/errors on FPU insn options.  */
static void
nios2_custom_check_insns (void)
{
  unsigned int i, j;
  bool errors = false;

  /* If any double-precision insn is enabled, the whole set of insns
     flagged N2F_DFREQ must be enabled too; a single enabled DF insn is
     enough to trigger the check, hence the break after the inner loop.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
      {
	for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
	  if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
	    {
	      error ("switch %<-mcustom-%s%> is required for double "
		     "precision floating point", N2FPU_NAME (j));
	      errors = true;
	    }
	break;
      }

  /* Warn if the user has certain exotic operations that won't get used
     without -funsafe-math-optimizations.  See expand_builtin () in
     builtins.c.  */
  if (!flag_unsafe_math_optimizations)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
     get used without -ffinite-math-only.  See fold_builtin_fmin_fmax ()
     in builtins.c.  */
  if (!flag_finite_math_only)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-ffinite-math-only is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use a custom rounding instruction
     that won't get used without -fno-math-errno.  See
     expand_builtin_int_roundingfn_2 () in builtins.c.  */
  if (flag_errno_math)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-fno-math-errno is specified", N2FPU_NAME (i));

  if (errors || custom_code_conflict)
    fatal_error (input_location,
		 "conflicting use of -mcustom switches, target attributes, "
		 "and/or __builtin_custom_ functions");
}

/* Assign custom code N to FPU insn CODE.  When OVERRIDE_P is false, an
   already-assigned code (other than the unset value -1) is kept.  The
   assignment is always recorded for conflict detection.  */
static void
nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
{
  if (override_p || N2FPU_N (code) == -1)
    N2FPU_N (code) = n;
  nios2_register_custom_code (n, CCS_FPU, (int) code);
}
/* Type to represent a standard FPU config.  */
struct nios2_fpu_config
{
  /* Name as given to -mcustom-fpu-cfg=, e.g. "60-1".  */
  const char *name;
  /* True if the config should also enable -fsingle-precision-constant.  */
  bool set_sp_constants;
  /* Custom code per FPU insn, or -1 when the insn is not in the config.  */
  int code[n2fpu_code_num];
};

#define NIOS2_FPU_CONFIG_NUM 3
static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];

/* Populate the table of standard FPU configs.  */
static void
nios2_init_fpu_configs (void)
{
  struct nios2_fpu_config* cfg;
  int i = 0;
/* Advance to the next slot, filling it with -1 bytes so every code[]
   entry starts out unassigned; name and set_sp_constants are then set
   explicitly below.  */
#define NEXT_FPU_CONFIG \
  do { \
    cfg = &custom_fpu_config[i++];			\
    memset (cfg, -1, sizeof (struct nios2_fpu_config));	\
  } while (0)

  NEXT_FPU_CONFIG;
  cfg->name = "60-1";
  cfg->set_sp_constants  = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;

  NEXT_FPU_CONFIG;
  cfg->name = "60-2";
  cfg->set_sp_constants  = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;
  cfg->code[n2fpu_fdivs] = 255;

  NEXT_FPU_CONFIG;
  cfg->name = "72-3";
  cfg->set_sp_constants    = true;
  cfg->code[n2fpu_floatus] = 243;
  cfg->code[n2fpu_fixsi]   = 244;
  cfg->code[n2fpu_floatis] = 245;
  cfg->code[n2fpu_fcmpgts] = 246;
  cfg->code[n2fpu_fcmples] = 249;
  cfg->code[n2fpu_fcmpeqs] = 250;
  cfg->code[n2fpu_fcmpnes] = 251;
  cfg->code[n2fpu_fmuls]   = 252;
  cfg->code[n2fpu_fadds]   = 253;
  cfg->code[n2fpu_fsubs]   = 254;
  cfg->code[n2fpu_fdivs]   = 255;

#undef NEXT_FPU_CONFIG
  gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
}

/* Look up CFGNAME in the table of standard FPU configs; return NULL if
   not found.  When ENDP is non-NULL it marks the end of CFGNAME and a
   length-bounded comparison is used instead.  */
static struct nios2_fpu_config *
nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
{
  int i;
  for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
    {
      bool match = !(endp != NULL
		     ? strncmp (custom_fpu_config[i].name, cfgname,
				endp - cfgname)
		     : strcmp (custom_fpu_config[i].name, cfgname));
      if (match)
	return &custom_fpu_config[i];
    }
  return NULL;
}
/* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
   OVERRIDE is true if loaded config codes should overwrite current state.  */
static void
nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
			     bool override)
{
  struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
  if (cfg)
    {
      unsigned int i;
      for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
	if (cfg->code[i] >= 0)
	  nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
				     override);
      if (cfg->set_sp_constants)
	flag_single_precision_constant = 1;
    }
  else
    warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
	     "value %<%s%>", cfgname);

  /* Guard against errors in the standard configurations.  */
  nios2_custom_check_insns ();
}

/* Check individual FPU insn options, and register custom code.  */
static void
nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
{
  int param = N2FPU_N (fpu_insn_index);

  if (0 <= param && param <= 255)
    nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);

  /* Valid values are 0-255, but also allow -1 so that the
     -mno-custom-<opt> switches work.  */
  else if (param != -1)
    error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
	   N2FPU_NAME (fpu_insn_index), param);
}

/* Allocate a chunk of memory for per-function machine-dependent data.  */
static struct machine_function *
nios2_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}

/* Implement TARGET_OPTION_OVERRIDE.  */
static void
nios2_option_override (void)
{
  unsigned int i;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Check for unsupported options.  */
  if (flag_pic && !TARGET_LINUX_ABI)
    sorry ("position-independent code requires the Linux ABI");

  /* Function to allocate machine-dependent function status.  */
  init_machine_status = &nios2_init_machine_status;

  /* Small-data threshold: -G value if given, otherwise the default.  */
  nios2_section_threshold
    = (global_options_set.x_g_switch_value
       ? g_switch_value : NIOS2_DEFAULT_GVALUE);

  if (nios2_gpopt_option == gpopt_unspecified)
    {
      /* Default to -mgpopt unless -fpic or -fPIC.  */
      if (flag_pic)
	nios2_gpopt_option = gpopt_none;
      else
	nios2_gpopt_option = gpopt_local;
    }

  /* If we don't have mul, we don't have mulx either!  */
  if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
    target_flags &= ~MASK_HAS_MULX;

  /* Initialize default FPU configurations.  */
  nios2_init_fpu_configs ();

  /* Set up default handling for floating point custom instructions.

     Putting things in this order means that the -mcustom-fpu-cfg=
     switch will always be overridden by individual -mcustom-fadds=
     switches, regardless of the order in which they were specified
     on the command line.

     This behavior of prioritization of individual -mcustom-<insn>=
     options before the -mcustom-fpu-cfg= switch is maintained for
     compatibility.  */
  if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
    nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);

  /* Handle options for individual FPU insns.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    nios2_handle_custom_fpu_insn_option (i);

  nios2_custom_check_insns ();

  /* Save the initial options in case the user does function specific
     options.  */
  target_option_default_node = target_option_current_node
    = build_target_option_node (&global_options);
}


/* Return true if CST is a constant within range of movi/movui/movhi.  */
static bool
nios2_simple_const_p (const_rtx cst)
{
  HOST_WIDE_INT val = INTVAL (cst);
  return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
}
   Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */
static bool
nios2_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
		 int opno ATTRIBUTE_UNUSED,
		 int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
      case CONST_INT:
	if (INTVAL (x) == 0)
	  {
	    /* Zero is free: it is always available in register r0.  */
	    *total = COSTS_N_INSNS (0);
	    return true;
	  }
	else if (nios2_simple_const_p (x))
	  {
	    /* Fits a single movi/movui/movhi immediate.  */
	    *total = COSTS_N_INSNS (2);
	    return true;
	  }
	else
	  {
	    /* Needs a two-insn (high/low) constant load.  */
	    *total = COSTS_N_INSNS (4);
	    return true;
	  }

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
      case CONST_DOUBLE:
	{
	  *total = COSTS_N_INSNS (4);
	  return true;
	}

      case AND:
	{
	  /* Recognize 'nor' insn pattern.  */
	  if (GET_CODE (XEXP (x, 0)) == NOT
	      && GET_CODE (XEXP (x, 1)) == NOT)
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	  return false;
	}

      case MULT:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      case SIGN_EXTEND:
	{
	  *total = COSTS_N_INSNS (3);
	  return false;
	}
      case ZERO_EXTEND:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      default:
	return false;
    }
}

/* Implement TARGET_PREFERRED_RELOAD_CLASS.  */
static reg_class_t
nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
{
  /* Steer reload away from NO_REGS; any general register will do.  */
  return regclass == NO_REGS ? GENERAL_REGS : regclass;
}

/* Emit a call to __tls_get_addr.  TI is the argument to this function.
   RET is an RTX for the return value location.  The entire insn sequence
   is returned.
 */
static GTY(()) rtx nios2_tls_symbol;

static rtx
nios2_call_tls_get_addr (rtx ti)
{
  rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
  rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
  rtx fn, insn;

  /* Lazily create the __tls_get_addr symbol on first use.  */
  if (!nios2_tls_symbol)
    nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");

  emit_move_insn (arg, ti);
  fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
  insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
  /* Mark the call const and record the registers it uses, so the
     optimizers treat it precisely.  */
  RTL_CONST_CALL_P (insn) = 1;
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);

  return ret;
}

/* Return true for large offsets requiring hiadj/lo relocation pairs.  */
static bool
nios2_large_offset_p (int unspec)
{
  gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);

  if (flag_pic == 2
      /* FIXME: TLS GOT offset relocations will eventually also get this
	 treatment, after binutils support for those are also completed.  */
      && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
    return true;

  /* 'gotoff' offsets are always hiadj/lo.  */
  if (unspec == UNSPEC_PIC_GOTOFF_SYM)
    return true;

  return false;
}

/* Return true for conforming unspec relocations.  Also used in
   constraints.md and predicates.md.  */
bool
nios2_unspec_reloc_p (rtx op)
{
  return (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC
	  && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
}

/* Helper to generate unspec constant.  Wraps LOC in (const (unspec ...))
   carrying the relocation kind UNSPEC.  */
static rtx
nios2_unspec_offset (rtx loc, int unspec)
{
  return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
					       unspec));
}

/* Generate GOT pointer based address with large offset.
 */
static rtx
nios2_large_got_address (rtx offset, rtx tmp)
{
  /* Load the large offset into a register (TMP, or a fresh pseudo)
     and add the GOT pointer to it.  */
  if (!tmp)
    tmp = gen_reg_rtx (Pmode);
  emit_move_insn (tmp, offset);
  return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
}

/* Generate a GOT pointer based address.  LOC is the symbol; UNSPEC
   identifies the relocation kind to apply.  */
static rtx
nios2_got_address (rtx loc, int unspec)
{
  rtx offset = nios2_unspec_offset (loc, unspec);
  crtl->uses_pic_offset_table = 1;

  if (nios2_large_offset_p (unspec))
    return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));

  return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
}

/* Generate the code to access LOC, a thread local SYMBOL_REF.  The
   return value will be a valid address and move_operand (either a REG
   or a LO_SUM).  */
static rtx
nios2_legitimize_tls_address (rtx loc)
{
  rtx tmp, mem, tp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);

  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      /* GD: pass the GOT entry address to __tls_get_addr.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
      return nios2_call_tls_get_addr (tmp);

    case TLS_MODEL_LOCAL_DYNAMIC:
      /* LD: get the module base from __tls_get_addr, then add the
	 DTP-relative offset of LOC.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
      return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));

    case TLS_MODEL_INITIAL_EXEC:
      /* IE: load the TP-relative offset from the GOT, add thread pointer.  */
      tmp = gen_reg_rtx (Pmode);
      mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
      emit_move_insn (tmp, mem);
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp, tmp);

    case TLS_MODEL_LOCAL_EXEC:
      /* LE: offset from the thread pointer is a link-time constant.  */
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp,
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
    default:
      gcc_unreachable ();
    }
}

/* Divide Support

   If -O3 is used, we want to output a table lookup for
divides between small numbers (both num and den >= 0 1310 and < 0x10). The overhead of this method in the worst 1311 case is 40 bytes in the text section (10 insns) and 1312 256 bytes in the data section. Additional divides do 1313 not incur additional penalties in the data section. 1314 1315 Code speed is improved for small divides by about 5x 1316 when using this method in the worse case (~9 cycles 1317 vs ~45). And in the worst case divides not within the 1318 table are penalized by about 10% (~5 cycles vs ~45). 1319 However in the typical case the penalty is not as bad 1320 because doing the long divide in only 45 cycles is 1321 quite optimistic. 1322 1323 ??? would be nice to have some benchmarks other 1324 than Dhrystone to back this up. 1325 1326 This bit of expansion is to create this instruction 1327 sequence as rtl. 1328 or $8, $4, $5 1329 slli $9, $4, 4 1330 cmpgeui $3, $8, 16 1331 beq $3, $0, .L3 1332 or $10, $9, $5 1333 add $12, $11, divide_table 1334 ldbu $2, 0($12) 1335 br .L1 1336.L3: 1337 call slow_div 1338.L1: 1339# continue here with result in $2 1340 1341 ??? Ideally I would like the libcall block to contain all 1342 of this code, but I don't know how to do that. What it 1343 means is that if the divide can be eliminated, it may not 1344 completely disappear. 1345 1346 ??? The __divsi3_table label should ideally be moved out 1347 of this block and into a global. If it is placed into the 1348 sdata section we can save even more cycles by doing things 1349 gp relative. */ 1350void 1351nios2_emit_expensive_div (rtx *operands, machine_mode mode) 1352{ 1353 rtx or_result, shift_left_result; 1354 rtx lookup_value; 1355 rtx_code_label *lab1, *lab3; 1356 rtx insns; 1357 rtx libfunc; 1358 rtx final_result; 1359 rtx tmp; 1360 rtx table; 1361 1362 /* It may look a little generic, but only SImode is supported for now. 
*/ 1363 gcc_assert (mode == SImode); 1364 libfunc = optab_libfunc (sdiv_optab, SImode); 1365 1366 lab1 = gen_label_rtx (); 1367 lab3 = gen_label_rtx (); 1368 1369 or_result = expand_simple_binop (SImode, IOR, 1370 operands[1], operands[2], 1371 0, 0, OPTAB_LIB_WIDEN); 1372 1373 emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0, 1374 GET_MODE (or_result), 0, lab3); 1375 JUMP_LABEL (get_last_insn ()) = lab3; 1376 1377 shift_left_result = expand_simple_binop (SImode, ASHIFT, 1378 operands[1], GEN_INT (4), 1379 0, 0, OPTAB_LIB_WIDEN); 1380 1381 lookup_value = expand_simple_binop (SImode, IOR, 1382 shift_left_result, operands[2], 1383 0, 0, OPTAB_LIB_WIDEN); 1384 table = gen_rtx_PLUS (SImode, lookup_value, 1385 gen_rtx_SYMBOL_REF (SImode, "__divsi3_table")); 1386 convert_move (operands[0], gen_rtx_MEM (QImode, table), 1); 1387 1388 tmp = emit_jump_insn (gen_jump (lab1)); 1389 JUMP_LABEL (tmp) = lab1; 1390 emit_barrier (); 1391 1392 emit_label (lab3); 1393 LABEL_NUSES (lab3) = 1; 1394 1395 start_sequence (); 1396 final_result = emit_library_call_value (libfunc, NULL_RTX, 1397 LCT_CONST, SImode, 2, 1398 operands[1], SImode, 1399 operands[2], SImode); 1400 1401 insns = get_insns (); 1402 end_sequence (); 1403 emit_libcall_block (insns, operands[0], final_result, 1404 gen_rtx_DIV (SImode, operands[1], operands[2])); 1405 1406 emit_label (lab1); 1407 LABEL_NUSES (lab1) = 1; 1408} 1409 1410 1411/* Branches and compares. */ 1412 1413/* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant 1414 comparison, e.g. >= 1 into > 0. */ 1415static void 1416nios2_alternate_compare_const (enum rtx_code code, rtx op, 1417 enum rtx_code *alt_code, rtx *alt_op, 1418 machine_mode mode) 1419{ 1420 HOST_WIDE_INT opval = INTVAL (op); 1421 enum rtx_code scode = signed_condition (code); 1422 bool dec_p = (scode == LT || scode == GE); 1423 1424 if (code == EQ || code == NE) 1425 { 1426 *alt_code = code; 1427 *alt_op = op; 1428 return; 1429 } 1430 1431 *alt_op = (dec_p 1432 ? 
gen_int_mode (opval - 1, mode) 1433 : gen_int_mode (opval + 1, mode)); 1434 1435 /* The required conversion between [>,>=] and [<,<=] is captured 1436 by a reverse + swap of condition codes. */ 1437 *alt_code = reverse_condition (swap_condition (code)); 1438 1439 { 1440 /* Test if the incremented/decremented value crosses the over/underflow 1441 boundary. Supposedly, such boundary cases should already be transformed 1442 into always-true/false or EQ conditions, so use an assertion here. */ 1443 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op); 1444 if (code == scode) 1445 alt_opval ^= (1 << (GET_MODE_BITSIZE (mode) - 1)); 1446 alt_opval &= GET_MODE_MASK (mode); 1447 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0); 1448 } 1449} 1450 1451/* Return true if the constant comparison is supported by nios2. */ 1452static bool 1453nios2_valid_compare_const_p (enum rtx_code code, rtx op) 1454{ 1455 switch (code) 1456 { 1457 case EQ: case NE: case GE: case LT: 1458 return SMALL_INT (INTVAL (op)); 1459 case GEU: case LTU: 1460 return SMALL_INT_UNSIGNED (INTVAL (op)); 1461 default: 1462 return false; 1463 } 1464} 1465 1466/* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in 1467 the current configuration. Perform modifications if MODIFY_P is true. 1468 Returns true if FPU compare can be done. 
 */

bool
nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
			    bool modify_p)
{
  bool rev_p = false;
  enum rtx_code code = GET_CODE (*cmp);

  /* If the code isn't directly supported, try the swapped condition
     (with operands exchanged); give up if neither form is available.  */
  if (!nios2_fpu_compare_enabled (code, mode))
    {
      code = swap_condition (code);
      if (nios2_fpu_compare_enabled (code, mode))
	rev_p = true;
      else
	return false;
    }

  if (modify_p)
    {
      if (rev_p)
	{
	  rtx tmp = *op1;
	  *op1 = *op2;
	  *op2 = tmp;
	}
      /* FPU custom insns need register operands.  */
      *op1 = force_reg (mode, *op1);
      *op2 = force_reg (mode, *op2);
      *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
    }
  return true;
}

/* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
   nios2 supported form.  Returns true if success.  */
bool
nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
{
  enum rtx_code code = GET_CODE (*cmp);
  enum rtx_code alt_code;
  rtx alt_op2;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);

  if (!reg_or_0_operand (*op2, mode))
    {
      /* Create alternate constant compare.  */
      nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);

      /* If alterate op2 is zero(0), we can use it directly, possibly
	 swapping the compare code.  */
      if (alt_op2 == const0_rtx)
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto check_rebuild_cmp;
	}

      /* Check if either constant compare can be used.  */
      if (nios2_valid_compare_const_p (code, *op2))
	return true;
      else if (nios2_valid_compare_const_p (alt_code, alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto rebuild_cmp;
	}

      /* We have to force op2 into a register now.  Try to pick one
	 with a lower cost.  */
      if (! nios2_simple_const_p (*op2)
	  && nios2_simple_const_p (alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	}
      *op2 = force_reg (SImode, *op2);
    }
 check_rebuild_cmp:
  /* The hardware only provides lt/ge (and their unsigned forms);
     express gt/le via the swapped comparison.  */
  if (code == GT || code == GTU || code == LE || code == LEU)
    {
      rtx t = *op1; *op1 = *op2; *op2 = t;
      code = swap_condition (code);
    }
 rebuild_cmp:
  *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
  return true;
}


/* Addressing Modes.  */

/* Implement TARGET_LEGITIMATE_CONSTANT_P.  TLS symbols are never
   legitimate constants; they need a specific access sequence.  */
static bool
nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx base, offset;
  split_const (x, &base, &offset);
  return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
}

/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
static bool
nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  return nios2_legitimate_constant_p (mode, x) == false;
}

/* Return true if register REGNO is a valid base register.
   STRICT_P is true if REG_OK_STRICT is in effect.  */

bool
nios2_regno_ok_for_base_p (int regno, bool strict_p)
{
  if (!HARD_REGISTER_NUM_P (regno))
    {
      /* Pseudos are acceptable in non-strict mode; in strict mode they
	 must have been assigned a hard register by reload.  */
      if (!strict_p)
	return true;

      if (!reg_renumber)
	return false;

      regno = reg_renumber[regno];
    }

  /* The fake registers will be eliminated to either the stack or
     hard frame pointer, both of which are usually valid base registers.
     Reload deals with the cases where the eliminated form isn't valid.  */
  return (GP_REG_P (regno)
	  || regno == FRAME_POINTER_REGNUM
	  || regno == ARG_POINTER_REGNUM);
}

/* Return true if the address expression formed by BASE + OFFSET is
   valid.
 */
static bool
nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
{
  if (!strict_p && GET_CODE (base) == SUBREG)
    base = SUBREG_REG (base);
  return (REG_P (base)
	  && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
	  && (offset == NULL_RTX
	      || const_arith_operand (offset, Pmode)
	      || nios2_unspec_reloc_p (offset)));
}

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
static bool
nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			    rtx operand, bool strict_p)
{
  switch (GET_CODE (operand))
    {
      /* Direct.  */
    case SYMBOL_REF:
      if (SYMBOL_REF_TLS_MODEL (operand))
	return false;

      /* Small-data symbols are directly addressable via the GP.  */
      if (nios2_symbol_ref_in_small_data_p (operand))
	return true;

      /* Else, fall through.  */
    case LABEL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
      return false;

      /* Register indirect.  */
    case REG:
      return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);

      /* Register indirect with displacement.  */
    case PLUS:
      {
	rtx op0 = XEXP (operand, 0);
	rtx op1 = XEXP (operand, 1);

	/* Try both operand orders as base + offset.  */
	return (nios2_valid_addr_expr_p (op0, op1, strict_p)
		|| nios2_valid_addr_expr_p (op1, op0, strict_p));
      }

    default:
      break;
    }
  return false;
}

/* Return true if SECTION is a small section name.  */
static bool
nios2_small_section_name_p (const char *section)
{
  return (strcmp (section, ".sbss") == 0
	  || strncmp (section, ".sbss.", 6) == 0
	  || strcmp (section, ".sdata") == 0
	  || strncmp (section, ".sdata.", 7) == 0);
}

/* Return true if EXP should be placed in the small data section.  */
static bool
nios2_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  if (TREE_CODE (exp) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (exp))
	{
	  const char *section = DECL_SECTION_NAME (exp);
	  if (nios2_small_section_name_p (section))
	    return true;
	}
      else
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

	  /* If this is an incomplete type with size 0, then we can't put it
	     in sdata because it might be too big when completed.  */
	  if (size > 0
	      && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
	    return true;
	}
    }

  return false;
}

/* Return true if symbol is in small data section.  */

bool
nios2_symbol_ref_in_small_data_p (rtx sym)
{
  tree decl;

  gcc_assert (GET_CODE (sym) == SYMBOL_REF);
  decl = SYMBOL_REF_DECL (sym);

  /* TLS variables are not accessed through the GP.  */
  if (SYMBOL_REF_TLS_MODEL (sym) != 0)
    return false;

  /* If the user has explicitly placed the symbol in a small data section
     via an attribute, generate gp-relative addressing even if the symbol
     is external, weak, or larger than we'd automatically put in the
     small data section.  OTOH, if the symbol is located in some
     non-small-data section, we can't use gp-relative accesses on it
     unless the user has requested gpopt_data or gpopt_all.  */

  switch (nios2_gpopt_option)
    {
    case gpopt_none:
      /* Don't generate a gp-relative addressing mode if that's been
	 disabled.  */
      return false;

    case gpopt_local:
      /* Use GP-relative addressing for small data symbols that are
	 not external or weak, plus any symbols that have explicitly
	 been placed in a small data section.  */
      if (decl && DECL_SECTION_NAME (decl))
	return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
      return (SYMBOL_REF_SMALL_P (sym)
	      && !SYMBOL_REF_EXTERNAL_P (sym)
	      && !(decl && DECL_WEAK (decl)));

    case gpopt_global:
      /* Use GP-relative addressing for small data symbols, even if
	 they are external or weak.  Note that SYMBOL_REF_SMALL_P
	 is also true of symbols that have explicitly been placed
	 in a small data section.  */
      return SYMBOL_REF_SMALL_P (sym);

    case gpopt_data:
      /* Use GP-relative addressing for all data symbols regardless
	 of the object size, but not for code symbols.  This option
	 is equivalent to the user asserting that the entire data
	 section is accessible from the GP.  */
      return !SYMBOL_REF_FUNCTION_P (sym);

    case gpopt_all:
      /* Use GP-relative addressing for everything, including code.
	 Effectively, the user has asserted that the entire program
	 fits within the 64K range of the GP offset.  */
      return true;

    default:
      /* We shouldn't get here.  */
      return false;
    }
}

/* Implement TARGET_SECTION_TYPE_FLAGS.  */

static unsigned int
nios2_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  flags = default_section_type_flags (decl, name, reloc);

  /* Tag small sections so the assembler output machinery knows.  */
  if (nios2_small_section_name_p (name))
    flags |= SECTION_SMALL;

  return flags;
}

/* Return true if SYMBOL_REF X binds locally.  */

static bool
nios2_symbol_binds_local_p (const_rtx x)
{
  return (SYMBOL_REF_DECL (x)
	  ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
	  : SYMBOL_REF_LOCAL_P (x));
}

/* Position independent code related.  */

/* Emit code to load the PIC register.
 */
static void
nios2_load_pic_register (void)
{
  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);

  /* Materialize the GOT pointer: load the pc-relative GOT base into the
     PIC register via a scratch, then add the two together.  */
  emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
  emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
}

/* Generate a PIC address as a MEM rtx.  SYM is the symbol, UNSPEC the
   relocation kind, and TMP an optional scratch register for the large
   offset case.  */
static rtx
nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
{
  if (flag_pic == 2
      && GET_CODE (sym) == SYMBOL_REF
      && nios2_symbol_binds_local_p (sym))
    /* Under -fPIC, generate a GOTOFF address for local symbols.  */
    {
      rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
      crtl->uses_pic_offset_table = 1;
      return nios2_large_got_address (offset, tmp);
    }

  return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
}

/* Nonzero if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and
   that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */
bool
nios2_legitimate_pic_operand_p (rtx x)
{
  /* Large-offset unspecs are handled specially and remain legitimate.  */
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == UNSPEC
      && nios2_large_offset_p (XINT (XEXP (x, 0), 1)))
    return true;

  return ! (GET_CODE (x) == SYMBOL_REF
	    || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
}

/* Return TRUE if X is a thread-local symbol.  */
static bool
nios2_tls_symbol_p (rtx x)
{
  return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
	  && SYMBOL_REF_TLS_MODEL (x) != 0);
}

/* Legitimize addresses that are CONSTANT_P expressions.
 */
static rtx
nios2_legitimize_constant_address (rtx addr)
{
  rtx base, offset;
  split_const (addr, &base, &offset);

  /* Only TLS and PIC symbols need rewriting; anything else is already
     a legitimate constant address.  */
  if (nios2_tls_symbol_p (base))
    base = nios2_legitimize_tls_address (base);
  else if (flag_pic)
    base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
  else
    return addr;

  if (offset != const0_rtx)
    {
      gcc_assert (can_create_pseudo_p ());
      /* Re-add the offset; small constant offsets may stay as an
	 immediate addend, anything else goes through a register.  */
      return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
			   (CONST_INT_P (offset)
			    ? (SMALL_INT (INTVAL (offset))
			       ? offset : force_reg (Pmode, offset))
			    : offset));
    }
  return base;
}

/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
static rtx
nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED)
{
  if (CONSTANT_P (x))
    return nios2_legitimize_constant_address (x);

  /* For the TLS LE (Local Exec) model, the compiler may try to
     combine constant offsets with unspec relocs, creating address RTXs
     looking like this:
     (plus:SI (reg:SI 23 r23)
	      (const:SI
		(plus:SI
		  (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
		  (const_int 48 [0x30]))))

     This usually happens when 'var' is a thread-local struct variable,
     and access of a field in var causes the addend.

     We typically want this combining, so transform the above into this
     form, which is allowed:
     (plus:SI (reg:SI 23 r23)
	      (const:SI
		(unspec:SI
		  [(const:SI
		     (plus:SI (symbol_ref:SI ("var"))
			      (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))

     Which will be output as '%tls_le(var+48)(r23)' in assembly.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      rtx unspec, offset;
      split_const (XEXP (x, 1), &unspec, &offset);
      if (GET_CODE (unspec) == UNSPEC
	  && !nios2_large_offset_p (XINT (unspec, 1))
	  && offset != const0_rtx)
	{
	  /* Fold the addend into the unspec's symbol operand.  */
	  rtx reg = force_reg (Pmode, XEXP (x, 0));
	  unspec = copy_rtx (unspec);
	  XVECEXP (unspec, 0, 0)
	    = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
	  x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
	}
    }

  return x;
}

/* Implement TARGET_DELEGITIMIZE_ADDRESS: strip our PIC/TLS unspec
   wrappers back off so debug output sees the underlying symbol.  */
static rtx
nios2_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
	{
	case UNSPEC_PIC_SYM:
	case UNSPEC_PIC_CALL_SYM:
	case UNSPEC_PIC_GOTOFF_SYM:
	case UNSPEC_ADD_TLS_GD:
	case UNSPEC_ADD_TLS_LDM:
	case UNSPEC_LOAD_TLS_IE:
	case UNSPEC_ADD_TLS_LE:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
	  gcc_assert (CONSTANT_P (x));
	  break;
	}
    }
  return x;
}

/* Main expander function for RTL moves.  */
int
nios2_emit_move_sequence (rtx *operands, machine_mode mode)
{
  rtx to = operands[0];
  rtx from = operands[1];

  /* A memory destination needs the source in a register (or zero).  */
  if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
    {
      gcc_assert (can_create_pseudo_p ());
      from = copy_to_mode_reg (mode, from);
    }

  /* Symbolic constants may need PIC/TLS rewriting; unspecs are already
     in their final form.  */
  if (GET_CODE (from) == SYMBOL_REF || GET_CODE (from) == LABEL_REF
      || (GET_CODE (from) == CONST
	  && GET_CODE (XEXP (from, 0)) != UNSPEC))
    from = nios2_legitimize_constant_address (from);

  operands[0] = to;
  operands[1] = from;
  return 0;
}

/* The function with address *ADDR is being called.  If the address
   needs to be loaded from the GOT, emit the instruction to do so and
   update *ADDR to point to the rtx for the loaded value.
1966 If REG != NULL_RTX, it is used as the target/scratch register in the 1967 GOT address calculation. */ 1968void 1969nios2_adjust_call_address (rtx *call_op, rtx reg) 1970{ 1971 if (MEM_P (*call_op)) 1972 call_op = &XEXP (*call_op, 0); 1973 1974 rtx addr = *call_op; 1975 if (flag_pic && CONSTANT_P (addr)) 1976 { 1977 rtx tmp = reg ? reg : NULL_RTX; 1978 if (!reg) 1979 reg = gen_reg_rtx (Pmode); 1980 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp); 1981 emit_insn (gen_rtx_SET (VOIDmode, reg, addr)); 1982 *call_op = reg; 1983 } 1984} 1985 1986 1987/* Output assembly language related definitions. */ 1988 1989/* Print the operand OP to file stream FILE modified by LETTER. 1990 LETTER can be one of: 1991 1992 i: print "i" if OP is an immediate, except 0 1993 o: print "io" if OP is volatile 1994 z: for const0_rtx print $0 instead of 0 1995 H: for %hiadj 1996 L: for %lo 1997 U: for upper half of 32 bit value 1998 D: for the upper 32-bits of a 64-bit double value 1999 R: prints reverse condition. 
*/
static void
nios2_print_operand (FILE *file, rtx op, int letter)
{

  /* Letters 'i' and 'o' depend only on the operand's class, not its
     code, so handle them before the main dispatch.  */
  switch (letter)
    {
    case 'i':
      if (CONSTANT_P (op) && op != const0_rtx)
	fprintf (file, "i");
      return;

    case 'o':
      if (GET_CODE (op) == MEM
	  && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
	      || TARGET_BYPASS_CACHE))
	fprintf (file, "io");
      return;

    default:
      break;
    }

  if (comparison_operator (op, VOIDmode))
    {
      enum rtx_code cond = GET_CODE (op);
      if (letter == 0)
	{
	  fprintf (file, "%s", GET_RTX_NAME (cond));
	  return;
	}
      if (letter == 'R')
	{
	  fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
	  return;
	}
    }

  switch (GET_CODE (op))
    {
    case REG:
      if (letter == 0 || letter == 'z')
	{
	  fprintf (file, "%s", reg_names[REGNO (op)]);
	  return;
	}
      else if (letter == 'D')
	{
	  /* High word of a double lives in the next register.  */
	  fprintf (file, "%s", reg_names[REGNO (op)+1]);
	  return;
	}
      break;

    case CONST_INT:
      if (INTVAL (op) == 0 && letter == 'z')
	{
	  fprintf (file, "zero");
	  return;
	}

      if (letter == 'U')
	{
	  HOST_WIDE_INT val = INTVAL (op);
	  val = (val >> 16) & 0xFFFF;
	  output_addr_const (file, gen_int_mode (val, SImode));
	  return;
	}
      /* Else, fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      if (letter == 0 || letter == 'z')
	{
	  output_addr_const (file, op);
	  return;
	}
      else if (letter == 'H' || letter == 'L')
	{
	  /* %hiadj/%lo of an unspec needs the relocation name prefixed,
	     e.g. %gotoff_hiadj(sym).  */
	  fprintf (file, "%%");
	  if (GET_CODE (op) == CONST
	      && GET_CODE (XEXP (op, 0)) == UNSPEC)
	    {
	      rtx unspec = XEXP (op, 0);
	      int unspec_reloc = XINT (unspec, 1);
	      gcc_assert (nios2_large_offset_p (unspec_reloc));
	      fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
	      op = XVECEXP (unspec, 0, 0);
	    }
	  fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
	  output_addr_const (file, op);
	  fprintf (file, ")");
	  return;
	}
      break;

    case SUBREG:
    case MEM:
      if (letter == 0)
	{
	  output_address (op);
	  return;
	}
      break;

    case CODE_LABEL:
      if (letter == 0)
	{
	  output_addr_const (file, op);
	  return;
	}
      break;

    default:
      break;
    }

  output_operand_lossage ("Unsupported operand for code '%c'", letter);
  gcc_unreachable ();
}

/* Return true if this is a GP-relative accessible reference.  */
static bool
gprel_constant_p (rtx op)
{
  if (GET_CODE (op) == SYMBOL_REF
      && nios2_symbol_ref_in_small_data_p (op))
    return true;
  else if (GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS)
    /* Recurse on the base of sym+offset constants.  */
    return gprel_constant_p (XEXP (XEXP (op, 0), 0));

  return false;
}

/* Return the name string for a supported unspec reloc offset.
 */
static const char *
nios2_unspec_reloc_name (int unspec)
{
  switch (unspec)
    {
    case UNSPEC_PIC_SYM:
      return "got";
    case UNSPEC_PIC_CALL_SYM:
      return "call";
    case UNSPEC_PIC_GOTOFF_SYM:
      return "gotoff";
    case UNSPEC_LOAD_TLS_IE:
      return "tls_ie";
    case UNSPEC_ADD_TLS_LE:
      return "tls_le";
    case UNSPEC_ADD_TLS_GD:
      return "tls_gd";
    case UNSPEC_ADD_TLS_LDM:
      return "tls_ldm";
    case UNSPEC_ADD_TLS_LDO:
      return "tls_ldo";
    default:
      /* NULL signals an unsupported unspec to callers.  */
      return NULL;
    }
}

/* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.  */
static bool
nios2_output_addr_const_extra (FILE *file, rtx op)
{
  const char *name;
  gcc_assert (GET_CODE (op) == UNSPEC);

  /* Support for printing out const unspec relocations.  */
  name = nios2_unspec_reloc_name (XINT (op, 1));
  if (name)
    {
      fprintf (file, "%%%s(", name);
      output_addr_const (file, XVECEXP (op, 0, 0));
      fprintf (file, ")");
      return true;
    }
  return false;
}

/* Implement TARGET_PRINT_OPERAND_ADDRESS.
 */
static void
nios2_print_operand_address (FILE *file, rtx op)
{
  switch (GET_CODE (op))
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case CONST_DOUBLE:
    case SYMBOL_REF:
      /* Small-data references are printed as a %gprel offset from the
	 global pointer register.  */
      if (gprel_constant_p (op))
        {
          fprintf (file, "%%gprel(");
          output_addr_const (file, op);
          fprintf (file, ")(%s)", reg_names[GP_REGNO]);
          return;
        }

      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* Register-plus-constant in either operand order prints as
	   "const(reg)".  */
	if (REG_P (op0) && CONSTANT_P (op1))
	  {
	    output_addr_const (file, op1);
	    fprintf (file, "(%s)", reg_names[REGNO (op0)]);
	    return;
	  }
	else if (REG_P (op1) && CONSTANT_P (op0))
	  {
	    output_addr_const (file, op0);
	    fprintf (file, "(%s)", reg_names[REGNO (op1)]);
	    return;
	  }
      }
      break;

    case REG:
      /* Plain register: zero displacement.  */
      fprintf (file, "0(%s)", reg_names[REGNO (op)]);
      return;

    case MEM:
      {
	/* Recurse on the address inside the MEM.  */
	rtx base = XEXP (op, 0);
	nios2_print_operand_address (file, base);
	return;
      }
    default:
      break;
    }

  /* Unhandled address form: dump it for debugging and abort.  */
  fprintf (stderr, "Missing way to print address\n");
  debug_rtx (op);
  gcc_unreachable ();
}

/* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL.  Emit a 4-byte %tls_ldo
   relocation for DWARF debug info referencing TLS variables.  */
static void
nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  gcc_assert (size == 4);
  fprintf (file, "\t.4byte\t%%tls_ldo(");
  output_addr_const (file, x);
  fprintf (file, ")");
}

/* Implement TARGET_ASM_FILE_END.  */

static void
nios2_asm_file_end (void)
{
  /* The Nios II Linux stack is mapped non-executable by default, so add a
     .note.GNU-stack section for switching to executable stacks only when
     trampolines are generated.  */
  if (TARGET_LINUX_ABI && trampolines_created)
    file_end_indicate_exec_stack ();
}

/* Implement TARGET_ASM_FUNCTION_PROLOGUE.
 */
static void
nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* Emit the frame layout as asm comments when requested.  */
  if (flag_verbose_asm || flag_debug_asm)
    {
      nios2_compute_frame_layout ();
      nios2_dump_frame_layout (file);
    }
}

/* Emit assembly of custom FPU instructions.  Builds (into a static
   buffer, so the result must be consumed before the next call) up to
   three "custom" instructions: an optional fwrx to preload the X
   register for a DFmode input, the main operation, and an optional
   frdy to extract the high part of a DFmode result.  */
const char *
nios2_fpu_insn_asm (enum n2fpu_code code)
{
  static char buf[256];
  const char *op1, *op2, *op3;
  int ln = 256, n = 0;

  int N = N2FPU_N (code);
  int num_operands = N2FPU (code).num_operands;
  const char *insn_name = N2FPU_NAME (code);
  tree ftype = nios2_ftype (N2FPU_FTCODE (code));
  machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
  machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));

  /* Prepare X register for DF input operands.  */
  if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
    n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
		  N2FPU_N (n2fpu_fwrx));

  if (src_mode == SFmode)
    {
      if (dst_mode == VOIDmode)
	{
	  /* The fwry case.  */
	  op1 = op3 = "zero";
	  op2 = "%0";
	  num_operands -= 1;
	}
      else
	{
	  op1 = (dst_mode == DFmode ? "%D0" : "%0");
	  op2 = "%1";
	  op3 = (num_operands == 2 ? "zero" : "%2");
	}
    }
  else if (src_mode == DFmode)
    {
      if (dst_mode == VOIDmode)
	{
	  /* The fwrx case.  */
	  op1 = "zero";
	  op2 = "%0";
	  op3 = "%D0";
	  num_operands -= 1;
	}
      else
	{
	  op1 = (dst_mode == DFmode ? "%D0" : "%0");
	  op2 = (num_operands == 2 ? "%1" : "%2");
	  op3 = (num_operands == 2 ? "%D1" : "%D2");
	}
    }
  else if (src_mode == VOIDmode)
    {
      /* frdxlo, frdxhi, frdy cases.  */
      gcc_assert (dst_mode == SFmode);
      op1 = "%0";
      op2 = op3 = "zero";
    }
  else if (src_mode == SImode)
    {
      /* Conversion operators.  */
      gcc_assert (num_operands == 2);
      op1 = (dst_mode == DFmode ? "%D0" : "%0");
      op2 = "%1";
      op3 = "zero";
    }
  else
    gcc_unreachable ();

  /* Main instruction string.  */
  n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
		 N, op1, op2, op3, insn_name,
		 (num_operands >= 2 ? ", %1" : ""),
		 (num_operands == 3 ? ", %2" : ""));

  /* Extraction of Y register for DF results.  */
  if (dst_mode == DFmode)
    snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
	      N2FPU_N (n2fpu_frdy));
  return buf;
}



/* Function argument related.  */

/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
		    const_tree type ATTRIBUTE_UNUSED,
		    bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  rtx return_rtx = NULL_RTX;

  /* The first NUM_ARG_REGS words of arguments go in registers starting
     at FIRST_ARG_REGNO; the rest (NULL_RTX result) go on the stack.  */
  if (cum->regs_used < NUM_ARG_REGS)
    return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);

  return return_rtx;
}

/* Return number of bytes, at the beginning of the argument, that must be
   put in registers.  0 if the argument is entirely in registers or entirely
   in memory.
*/ 2393 2394static int 2395nios2_arg_partial_bytes (cumulative_args_t cum_v, 2396 machine_mode mode, tree type ATTRIBUTE_UNUSED, 2397 bool named ATTRIBUTE_UNUSED) 2398{ 2399 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 2400 HOST_WIDE_INT param_size; 2401 2402 if (mode == BLKmode) 2403 { 2404 param_size = int_size_in_bytes (type); 2405 gcc_assert (param_size >= 0); 2406 } 2407 else 2408 param_size = GET_MODE_SIZE (mode); 2409 2410 /* Convert to words (round up). */ 2411 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD; 2412 2413 if (cum->regs_used < NUM_ARG_REGS 2414 && cum->regs_used + param_size > NUM_ARG_REGS) 2415 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD; 2416 2417 return 0; 2418} 2419 2420/* Update the data in CUM to advance over an argument of mode MODE 2421 and data type TYPE; TYPE is null for libcalls where that information 2422 may not be available. */ 2423 2424static void 2425nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, 2426 const_tree type ATTRIBUTE_UNUSED, 2427 bool named ATTRIBUTE_UNUSED) 2428{ 2429 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 2430 HOST_WIDE_INT param_size; 2431 2432 if (mode == BLKmode) 2433 { 2434 param_size = int_size_in_bytes (type); 2435 gcc_assert (param_size >= 0); 2436 } 2437 else 2438 param_size = GET_MODE_SIZE (mode); 2439 2440 /* Convert to words (round up). */ 2441 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD; 2442 2443 if (cum->regs_used + param_size > NUM_ARG_REGS) 2444 cum->regs_used = NUM_ARG_REGS; 2445 else 2446 cum->regs_used += param_size; 2447} 2448 2449enum direction 2450nios2_function_arg_padding (machine_mode mode, const_tree type) 2451{ 2452 /* On little-endian targets, the first byte of every stack argument 2453 is passed in the first byte of the stack slot. 
*/ 2454 if (!BYTES_BIG_ENDIAN) 2455 return upward; 2456 2457 /* Otherwise, integral types are padded downward: the last byte of a 2458 stack argument is passed in the last byte of the stack slot. */ 2459 if (type != 0 2460 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type) 2461 : GET_MODE_CLASS (mode) == MODE_INT) 2462 return downward; 2463 2464 /* Arguments smaller than a stack slot are padded downward. */ 2465 if (mode != BLKmode) 2466 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward; 2467 2468 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT)) 2469 ? upward : downward); 2470} 2471 2472enum direction 2473nios2_block_reg_padding (machine_mode mode, tree type, 2474 int first ATTRIBUTE_UNUSED) 2475{ 2476 return nios2_function_arg_padding (mode, type); 2477} 2478 2479/* Emit RTL insns to initialize the variable parts of a trampoline. 2480 FNADDR is an RTX for the address of the function's pure code. 2481 CXT is an RTX for the static chain value for the function. 2482 On Nios II, we handle this by a library call. */ 2483static void 2484nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt) 2485{ 2486 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0); 2487 rtx ctx_reg = force_reg (Pmode, cxt); 2488 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0)); 2489 2490 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"), 2491 LCT_NORMAL, VOIDmode, 3, addr, Pmode, fnaddr, Pmode, 2492 ctx_reg, Pmode); 2493} 2494 2495/* Implement TARGET_FUNCTION_VALUE. */ 2496static rtx 2497nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED, 2498 bool outgoing ATTRIBUTE_UNUSED) 2499{ 2500 return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO); 2501} 2502 2503/* Implement TARGET_LIBCALL_VALUE. */ 2504static rtx 2505nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED) 2506{ 2507 return gen_rtx_REG (mode, FIRST_RETVAL_REGNO); 2508} 2509 2510/* Implement TARGET_FUNCTION_VALUE_REGNO_P. 
 */
static bool
nios2_function_value_regno_p (const unsigned int regno)
{
  return regno == FIRST_RETVAL_REGNO;
}

/* Implement TARGET_RETURN_IN_MEMORY.  Values larger than two words
   (or of unknown/variable size) are returned in memory.  */
static bool
nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
	  || int_size_in_bytes (type) == -1);
}

/* TODO: It may be possible to eliminate the copyback and implement
   own va_arg type.  */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
			      machine_mode mode, tree type,
			      int *pretend_size, int second_time)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  CUMULATIVE_ARGS local_cum;
  cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
  int regs_to_push;
  int pret_size;

  /* Advance a copy of CUM past the last named argument to find how many
     argument registers remain for the anonymous arguments.  */
  local_cum = *cum;
  nios2_function_arg_advance (local_cum_v, mode, type, 1);

  regs_to_push = NUM_ARG_REGS - local_cum.regs_used;

  /* Spill the remaining argument registers to the pretend-args area,
     bracketed by blockage insns so the stores are not moved.  */
  if (!second_time && regs_to_push > 0)
    {
      rtx ptr = virtual_incoming_args_rtx;
      rtx mem = gen_rtx_MEM (BLKmode, ptr);
      emit_insn (gen_blockage ());
      move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
			   regs_to_push);
      emit_insn (gen_blockage ());
    }

  pret_size = regs_to_push * UNITS_PER_WORD;
  if (pret_size)
    *pretend_size = pret_size;
}



/* Init FPU builtins.
 */
static void
nios2_init_fpu_builtins (int start_code)
{
  tree fndecl;
  char builtin_name[64] = "__builtin_custom_";
  unsigned int i, n = strlen ("__builtin_custom_");

  /* Register one __builtin_custom_<name> per FPU insn, with function
     codes starting at START_CODE.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    {
      snprintf (builtin_name + n, sizeof (builtin_name) - n,
		"%s", N2FPU_NAME (i));
      fndecl =
	add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
			      start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
      nios2_register_builtin_fndecl (start_code + i, fndecl);
    }
}

/* Helper function for expanding FPU builtins.  */
static rtx
nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
{
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  enum insn_code icode = N2FPU_ICODE (code);
  int nargs, argno, opno = 0;
  int num_operands = N2FPU (code).num_operands;
  machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
  bool has_target_p = (dst_mode != VOIDmode);

  /* N2FPU_N < 0 means no -mcustom-<insn> switch enabled this insn.  */
  if (N2FPU_N (code) < 0)
    fatal_error (input_location,
		 "Cannot call %<__builtin_custom_%s%> without specifying switch"
		 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
  if (has_target_p)
    create_output_operand (&ops[opno++], target, dst_mode);
  else
    /* Subtract away the count of the VOID return, mainly for fwrx/fwry.  */
    num_operands -= 1;
  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      tree arg = CALL_EXPR_ARG (exp, argno);
      create_input_operand (&ops[opno++], expand_normal (arg),
			    TYPE_MODE (TREE_TYPE (arg)));
    }
  if (!maybe_expand_insn (icode, num_operands, ops))
    {
      error ("invalid argument to built-in function");
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  return has_target_p ? ops[0].value : const0_rtx;
}

/* Nios II has custom instruction built-in functions of the forms:
   __builtin_custom_n
   __builtin_custom_nX
   __builtin_custom_nXX
   __builtin_custom_Xn
   __builtin_custom_XnX
   __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
*/
#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
/* Suffix strings ("nii", "fnf", ...) for each custom builtin, used in
   diagnostics; filled in by nios2_init_custom_builtins.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];

static void
nios2_init_custom_builtins (int start_code)
{
  tree builtin_ftype, ret_type, fndecl;
  char builtin_name[32] = "__builtin_custom_";
  int n = strlen ("__builtin_custom_");
  int builtin_code = 0;
  int lhs, rhs1, rhs2;

  /* Operand-type table: index 0 is "no operand" (z), then f/i/p.  */
  struct { tree type; const char *c; } op[4];
  /* z */ op[0].c = "";  op[0].type = NULL_TREE;
  /* f */ op[1].c = "f"; op[1].type = float_type_node;
  /* i */ op[2].c = "i"; op[2].type = integer_type_node;
  /* p */ op[3].c = "p"; op[3].type = ptr_type_node;

  /* We enumerate through the possible operand types to create all the
     __builtin_custom_XnXX function tree types.  Note that these may slightly
     overlap with the function types created for other fixed builtins.  */

  for (lhs = 0; lhs < 4; lhs++)
    for (rhs1 = 0; rhs1 < 4; rhs1++)
      for (rhs2 = 0; rhs2 < 4; rhs2++)
	{
	  /* A second operand requires a first operand.  */
	  if (rhs1 == 0 && rhs2 != 0)
	    continue;
	  ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
	  builtin_ftype
	    = build_function_type_list (ret_type, integer_type_node,
					op[rhs1].type, op[rhs2].type,
					NULL_TREE);
	  snprintf (builtin_name + n, 32 - n, "%sn%s%s",
		    op[lhs].c, op[rhs1].c, op[rhs2].c);
	  /* Save copy of parameter string into custom_builtin_name[].  */
	  strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
	  fndecl =
	    add_builtin_function (builtin_name, builtin_ftype,
				  start_code + builtin_code,
				  BUILT_IN_MD, NULL, NULL_TREE);
	  nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
	  builtin_code += 1;
	}
}

/* Helper function for expanding custom builtins.  */
static rtx
nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
{
  bool has_target_p = (TREE_TYPE (exp) != void_type_node);
  machine_mode tmode = VOIDmode;
  int nargs, argno;
  rtx value, insn, unspec_args[3];
  tree arg;

  /* XnXX form.  */
  if (has_target_p)
    {
      tmode = TYPE_MODE (TREE_TYPE (exp));
      if (!target || GET_MODE (target) != tmode
	  || !REG_P (target))
	target = gen_reg_rtx (tmode);
    }

  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      arg = CALL_EXPR_ARG (exp, argno);
      value = expand_normal (arg);
      unspec_args[argno] = value;
      if (argno == 0)
	{
	  /* The first argument is the custom opcode N, which must be a
	     compile-time constant in 0-255.  */
	  if (!custom_insn_opcode (value, VOIDmode))
	    error ("custom instruction opcode must be compile time "
		   "constant in the range 0-255 for __builtin_custom_%s",
		   custom_builtin_name[index]);
	}
      else
	/* For other arguments, force into a register.  */
	unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
					unspec_args[argno]);
    }
  /* Fill remaining unspec operands with zero.  */
  for (; argno < 3; argno++)
    unspec_args[argno] = const0_rtx;

  insn = (has_target_p
	  ? gen_rtx_SET (VOIDmode, target,
			 gen_rtx_UNSPEC_VOLATILE (tmode,
						  gen_rtvec_v (3, unspec_args),
						  UNSPECV_CUSTOM_XNXX))
	  : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
				     UNSPECV_CUSTOM_NXX));
  emit_insn (insn);
  return has_target_p ? target : const0_rtx;
}




/* Main definition of built-in functions.  Nios II has a small number of fixed
   builtins, plus a large number of FPU insn builtins, and builtins for
   generating custom instructions.  */

struct nios2_builtin_desc
{
  enum insn_code icode;
  enum nios2_ftcode ftype;
  const char *name;
};

#define N2_BUILTINS					\
  N2_BUILTIN_DEF (sync,   N2_FTYPE_VOID_VOID)		\
  N2_BUILTIN_DEF (ldbio,  N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR)		\
  N2_BUILTIN_DEF (ldhio,  N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR)		\
  N2_BUILTIN_DEF (ldwio,  N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (stbio,  N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (sthio,  N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (stwio,  N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (rdctl,  N2_FTYPE_SI_SI)		\
  N2_BUILTIN_DEF (wrctl,  N2_FTYPE_VOID_SI_SI)

enum nios2_builtin_code {
#define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
  N2_BUILTINS
#undef N2_BUILTIN_DEF
  NUM_FIXED_NIOS2_BUILTINS
};

static const struct nios2_builtin_desc nios2_builtins[] = {
#define N2_BUILTIN_DEF(name, ftype)			\
  { CODE_FOR_ ## name, ftype, "__builtin_" #name },
  N2_BUILTINS
#undef N2_BUILTIN_DEF
};

/* Start/ends of FPU/custom insn builtin index ranges.  */
static unsigned int nios2_fpu_builtin_base;
static unsigned int nios2_custom_builtin_base;
static unsigned int nios2_custom_builtin_end;

/* Implement TARGET_INIT_BUILTINS.  Function codes are laid out as
   [fixed builtins][FPU builtins][custom-insn builtins].  */
static void
nios2_init_builtins (void)
{
  unsigned int i;

  /* Initialize fixed builtins.  */
  for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
    {
      const struct nios2_builtin_desc *d = &nios2_builtins[i];
      tree fndecl =
	add_builtin_function (d->name, nios2_ftype (d->ftype), i,
			      BUILT_IN_MD, NULL, NULL);
      nios2_register_builtin_fndecl (i, fndecl);
    }

  /* Initialize FPU builtins.  */
  nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
  nios2_init_fpu_builtins (nios2_fpu_builtin_base);

  /* Initialize custom insn builtins.  */
  nios2_custom_builtin_base
    = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
  nios2_custom_builtin_end
    = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
  nios2_init_custom_builtins (nios2_custom_builtin_base);
}

/* Array of fndecls for TARGET_BUILTIN_DECL.  */
#define NIOS2_NUM_BUILTINS \
  (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];

static void
nios2_register_builtin_fndecl (unsigned code, tree fndecl)
{
  nios2_builtin_decls[code] = fndecl;
}

/* Implement TARGET_BUILTIN_DECL.  */
static tree
nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));

  if (code >= nios2_custom_builtin_end)
    return error_mark_node;

  /* FPU builtins are only available when enabled by -mcustom-* switches.  */
  if (code >= nios2_fpu_builtin_base
      && code < nios2_custom_builtin_base
      && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
    return error_mark_node;

  return nios2_builtin_decls[code];
}


/* Low-level built-in expand routine.  */
static rtx
nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
			   struct expand_operand *ops, bool has_target_p)
{
  if (maybe_expand_insn (d->icode, n, ops))
    return has_target_p ? ops[0].value : const0_rtx;
  else
    {
      error ("invalid argument to built-in function %s", d->name);
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
}

/* Expand ldio/stio form load-store instruction builtins.  */
static rtx
nios2_expand_ldstio_builtin (tree exp, rtx target,
			     const struct nios2_builtin_desc *d)
{
  bool has_target_p;
  rtx addr, mem, val;
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  machine_mode mode = insn_data[d->icode].operand[0].mode;

  addr = expand_normal (CALL_EXPR_ARG (exp, 0));
  mem = gen_rtx_MEM (mode, addr);

  /* Distinguish stores from loads by whether operand 0 is the memory.  */
  if (insn_data[d->icode].operand[0].allows_mem)
    {
      /* stxio.  */
      val = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (val))
	val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
      val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
      create_output_operand (&ops[0], mem, mode);
      create_input_operand (&ops[1], val, mode);
      has_target_p = false;
    }
  else
    {
      /* ldxio.  */
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], mem, mode);
      has_target_p = true;
    }
  return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
}

/* Expand rdctl/wrctl builtins.  */
static rtx
nios2_expand_rdwrctl_builtin (tree exp, rtx target,
			      const struct nios2_builtin_desc *d)
{
  /* rdctl produces a value (register output); wrctl does not.  */
  bool has_target_p = (insn_data[d->icode].operand[0].predicate
		       == register_operand);
  rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  if (!rdwrctl_operand (ctlcode, VOIDmode))
    {
      error ("Control register number must be in range 0-31 for %s",
	     d->name);
      return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
    }
  if (has_target_p)
    {
      create_output_operand (&ops[0], target, SImode);
      create_integer_operand (&ops[1], INTVAL (ctlcode));
    }
  else
    {
      rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
      create_integer_operand (&ops[0], INTVAL (ctlcode));
      create_input_operand (&ops[1], val, SImode);
    }
  return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
}

/* Implement TARGET_EXPAND_BUILTIN.  Expand an expression EXP that calls
   a built-in function, with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Dispatch on which of the three function-code ranges FCODE is in.  */
  if (fcode < nios2_fpu_builtin_base)
    {
      const struct nios2_builtin_desc *d = &nios2_builtins[fcode];

      switch (fcode)
	{
	case NIOS2_BUILTIN_sync:
	  emit_insn (gen_sync ());
	  return const0_rtx;

	case NIOS2_BUILTIN_ldbio:
	case NIOS2_BUILTIN_ldbuio:
	case NIOS2_BUILTIN_ldhio:
	case NIOS2_BUILTIN_ldhuio:
	case NIOS2_BUILTIN_ldwio:
	case NIOS2_BUILTIN_stbio:
	case NIOS2_BUILTIN_sthio:
	case NIOS2_BUILTIN_stwio:
	  return nios2_expand_ldstio_builtin (exp, target, d);

	case NIOS2_BUILTIN_rdctl:
	case NIOS2_BUILTIN_wrctl:
	  return nios2_expand_rdwrctl_builtin (exp, target, d);

	default:
	  gcc_unreachable ();
	}
    }
  else if (fcode < nios2_custom_builtin_base)
    /* FPU builtin range.  */
    return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
				     target);
  else if (fcode < nios2_custom_builtin_end)
    /* Custom insn builtin range.  */
    return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
					target);
  else
    gcc_unreachable ();
}

/* Implement TARGET_INIT_LIBFUNCS.  */
static void
nios2_init_libfuncs (void)
{
  /* For Linux, we have access to kernel support for atomic operations.  */
  if (TARGET_LINUX_ABI)
    init_sync_libfuncs (UNITS_PER_WORD);
}



/* Register a custom code use, and signal error if a conflict was found.  */
static void
nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
			    int index)
{
  gcc_assert (N <= 255);

  if (status == CCS_FPU)
    {
      if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
	{
	  custom_code_conflict = true;
	  error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
		 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
	}
      else if (custom_code_status[N] == CCS_BUILTIN_CALL)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
		 N2FPU_NAME (index));
	}
    }
  else if (status == CCS_BUILTIN_CALL)
    {
      if (custom_code_status[N] == CCS_FPU)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[index],
		 N2FPU_NAME (custom_code_index[N]));
	}
      else
	{
	  /* Note that code conflicts between different __builtin_custom_xnxx
	     calls are not checked.  */
	}
    }
  else
    gcc_unreachable ();

  custom_code_status[N] = status;
  custom_code_index[N] = index;
}

/* Mark a custom code as not in use.
*/ 3015static void 3016nios2_deregister_custom_code (unsigned int N) 3017{ 3018 if (N <= 255) 3019 { 3020 custom_code_status[N] = CCS_UNUSED; 3021 custom_code_index[N] = 0; 3022 } 3023} 3024 3025/* Target attributes can affect per-function option state, so we need to 3026 save/restore the custom code tracking info using the 3027 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */ 3028 3029static void 3030nios2_option_save (struct cl_target_option *ptr, 3031 struct gcc_options *opts ATTRIBUTE_UNUSED) 3032{ 3033 unsigned int i; 3034 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++) 3035 ptr->saved_fpu_custom_code[i] = N2FPU_N (i); 3036 memcpy (ptr->saved_custom_code_status, custom_code_status, 3037 sizeof (custom_code_status)); 3038 memcpy (ptr->saved_custom_code_index, custom_code_index, 3039 sizeof (custom_code_index)); 3040} 3041 3042static void 3043nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED, 3044 struct cl_target_option *ptr) 3045{ 3046 unsigned int i; 3047 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++) 3048 N2FPU_N (i) = ptr->saved_fpu_custom_code[i]; 3049 memcpy (custom_code_status, ptr->saved_custom_code_status, 3050 sizeof (custom_code_status)); 3051 memcpy (custom_code_index, ptr->saved_custom_code_index, 3052 sizeof (custom_code_index)); 3053} 3054 3055/* Inner function to process the attribute((target(...))), take an argument and 3056 set the current options from the argument. If we have a list, recursively 3057 go over the list. 
*/ 3058 3059static bool 3060nios2_valid_target_attribute_rec (tree args) 3061{ 3062 if (TREE_CODE (args) == TREE_LIST) 3063 { 3064 bool ret = true; 3065 for (; args; args = TREE_CHAIN (args)) 3066 if (TREE_VALUE (args) 3067 && !nios2_valid_target_attribute_rec (TREE_VALUE (args))) 3068 ret = false; 3069 return ret; 3070 } 3071 else if (TREE_CODE (args) == STRING_CST) 3072 { 3073 char *argstr = ASTRDUP (TREE_STRING_POINTER (args)); 3074 while (argstr && *argstr != '\0') 3075 { 3076 bool no_opt = false, end_p = false; 3077 char *eq = NULL, *p; 3078 while (ISSPACE (*argstr)) 3079 argstr++; 3080 p = argstr; 3081 while (*p != '\0' && *p != ',') 3082 { 3083 if (!eq && *p == '=') 3084 eq = p; 3085 ++p; 3086 } 3087 if (*p == '\0') 3088 end_p = true; 3089 else 3090 *p = '\0'; 3091 if (eq) *eq = '\0'; 3092 3093 if (!strncmp (argstr, "no-", 3)) 3094 { 3095 no_opt = true; 3096 argstr += 3; 3097 } 3098 if (!strncmp (argstr, "custom-fpu-cfg", 14)) 3099 { 3100 char *end_eq = p; 3101 if (no_opt) 3102 { 3103 error ("custom-fpu-cfg option does not support %<no-%>"); 3104 return false; 3105 } 3106 if (!eq) 3107 { 3108 error ("custom-fpu-cfg option requires configuration" 3109 " argument"); 3110 return false; 3111 } 3112 /* Increment and skip whitespace. */ 3113 while (ISSPACE (*(++eq))) ; 3114 /* Decrement and skip to before any trailing whitespace. */ 3115 while (ISSPACE (*(--end_eq))) ; 3116 3117 nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true); 3118 } 3119 else if (!strncmp (argstr, "custom-", 7)) 3120 { 3121 int code = -1; 3122 unsigned int i; 3123 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++) 3124 if (!strncmp (argstr + 7, N2FPU_NAME (i), 3125 strlen (N2FPU_NAME (i)))) 3126 { 3127 /* Found insn. */ 3128 code = i; 3129 break; 3130 } 3131 if (code >= 0) 3132 { 3133 if (no_opt) 3134 { 3135 if (eq) 3136 { 3137 error ("%<no-custom-%s%> does not accept arguments", 3138 N2FPU_NAME (code)); 3139 return false; 3140 } 3141 /* Disable option by setting to -1. 
*/ 3142 nios2_deregister_custom_code (N2FPU_N (code)); 3143 N2FPU_N (code) = -1; 3144 } 3145 else 3146 { 3147 char *t; 3148 if (eq) 3149 while (ISSPACE (*(++eq))) ; 3150 if (!eq || eq == p) 3151 { 3152 error ("%<custom-%s=%> requires argument", 3153 N2FPU_NAME (code)); 3154 return false; 3155 } 3156 for (t = eq; t != p; ++t) 3157 { 3158 if (ISSPACE (*t)) 3159 continue; 3160 if (!ISDIGIT (*t)) 3161 { 3162 error ("`custom-%s=' argument requires " 3163 "numeric digits", N2FPU_NAME (code)); 3164 return false; 3165 } 3166 } 3167 /* Set option to argument. */ 3168 N2FPU_N (code) = atoi (eq); 3169 nios2_handle_custom_fpu_insn_option (code); 3170 } 3171 } 3172 else 3173 { 3174 error ("%<custom-%s=%> is not recognised as FPU instruction", 3175 argstr + 7); 3176 return false; 3177 } 3178 } 3179 else 3180 { 3181 error ("%<%s%> is unknown", argstr); 3182 return false; 3183 } 3184 3185 if (end_p) 3186 break; 3187 else 3188 argstr = p + 1; 3189 } 3190 return true; 3191 } 3192 else 3193 gcc_unreachable (); 3194} 3195 3196/* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */ 3197 3198static tree 3199nios2_valid_target_attribute_tree (tree args) 3200{ 3201 if (!nios2_valid_target_attribute_rec (args)) 3202 return NULL_TREE; 3203 nios2_custom_check_insns (); 3204 return build_target_option_node (&global_options); 3205} 3206 3207/* Hook to validate attribute((target("string"))). */ 3208 3209static bool 3210nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name), 3211 tree args, int ARG_UNUSED (flags)) 3212{ 3213 struct cl_target_option cur_target; 3214 bool ret = true; 3215 tree old_optimize = build_optimization_node (&global_options); 3216 tree new_target, new_optimize; 3217 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl); 3218 3219 /* If the function changed the optimization levels as well as setting target 3220 options, start with the optimizations specified. 
*/ 3221 if (func_optimize && func_optimize != old_optimize) 3222 cl_optimization_restore (&global_options, 3223 TREE_OPTIMIZATION (func_optimize)); 3224 3225 /* The target attributes may also change some optimization flags, so update 3226 the optimization options if necessary. */ 3227 cl_target_option_save (&cur_target, &global_options); 3228 new_target = nios2_valid_target_attribute_tree (args); 3229 new_optimize = build_optimization_node (&global_options); 3230 3231 if (!new_target) 3232 ret = false; 3233 3234 else if (fndecl) 3235 { 3236 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target; 3237 3238 if (old_optimize != new_optimize) 3239 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize; 3240 } 3241 3242 cl_target_option_restore (&global_options, &cur_target); 3243 3244 if (old_optimize != new_optimize) 3245 cl_optimization_restore (&global_options, 3246 TREE_OPTIMIZATION (old_optimize)); 3247 return ret; 3248} 3249 3250/* Remember the last target of nios2_set_current_function. */ 3251static GTY(()) tree nios2_previous_fndecl; 3252 3253/* Establish appropriate back-end context for processing the function 3254 FNDECL. The argument might be NULL to indicate processing at top 3255 level, outside of any function scope. */ 3256static void 3257nios2_set_current_function (tree fndecl) 3258{ 3259 tree old_tree = (nios2_previous_fndecl 3260 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl) 3261 : NULL_TREE); 3262 3263 tree new_tree = (fndecl 3264 ? 
		   DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
		   : NULL_TREE);

  if (fndecl && fndecl != nios2_previous_fndecl)
    {
      nios2_previous_fndecl = fndecl;
      /* Nothing to do when the effective target options are unchanged.  */
      if (old_tree == new_tree)
	;

      else if (new_tree)
	{
	  /* Switch to the new function's saved target options and
	     reinitialize target-dependent state to match.  */
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (new_tree));
	  target_reinit ();
	}

      else if (old_tree)
	{
	  /* The new function has no per-function options; fall back to
	     the currently prevailing (command-line/pragma) options.  */
	  struct cl_target_option *def
	    = TREE_TARGET_OPTION (target_option_current_node);

	  cl_target_option_restore (&global_options, def);
	  target_reinit ();
	}
    }
}

/* Hook to validate the current #pragma GCC target and set the FPU custom
   code option state.  If ARGS is NULL, then POP_TARGET is used to reset
   the options.  */
static bool
nios2_pragma_target_parse (tree args, tree pop_target)
{
  tree cur_tree;
  if (! args)
    {
      /* #pragma GCC pop/reset: restore the popped target options, or the
	 command-line defaults when there is nothing to pop.  */
      cur_tree = ((pop_target)
		  ? pop_target
		  : target_option_default_node);
      cl_target_option_restore (&global_options,
				TREE_TARGET_OPTION (cur_tree));
    }
  else
    {
      /* #pragma GCC target("..."): parse and validate the option string.
	 Errors were already diagnosed when this returns NULL.  */
      cur_tree = nios2_valid_target_attribute_tree (args);
      if (!cur_tree)
	return false;
    }

  target_option_current_node = cur_tree;
  return true;
}

/* Implement TARGET_MERGE_DECL_ATTRIBUTES.
   We are just using this hook to add some additional error checking to
   the default behavior.  GCC does not provide a target hook for merging
   the target options, and only correctly handles merging empty vs non-empty
   option data; see merge_decls() in c-decl.c.
   So here we require either that at least one of the decls has empty
   target options, or that the target options/data be identical.
 */
static tree
nios2_merge_decl_attributes (tree olddecl, tree newdecl)
{
  tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
  tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
  /* Only worth checking when both decls carry (different) "target"
     attributes; the empty vs non-empty case is handled by merge_decls.  */
  if (newopts && oldopts && newopts != oldopts)
    {
      tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
      tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
      if (oldtree && newtree && oldtree != newtree)
	{
	  struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
	  struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
	  /* NOTE(review): bitwise memcmp of cl_target_option assumes the
	     struct has no uninitialized padding — true for the generated
	     options structure, but worth confirming if fields change.  */
	  if (olddata != newdata
	      && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
	    error ("%qE redeclared with conflicting %qs attributes",
		   DECL_NAME (newdecl), "target");
	}
    }
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}

/* Implement TARGET_ASM_OUTPUT_MI_THUNK.
   Emit the assembly for a vcall thunk: adjust the this-pointer by DELTA
   (and, if nonzero, by the vtable entry at VCALL_OFFSET), then tail-call
   FUNCTION.  Output goes directly to FILE.  */
static void
nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			   HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			   tree function)
{
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  if (flag_pic)
    nios2_load_pic_register ();

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $5.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
  else
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);

  /* Add DELTA to THIS_RTX.  */
  nios2_emit_add_constant (this_rtx, delta);

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.
 */
  if (vcall_offset)
    {
      rtx tmp;

      /* r2 is used as a scratch register here; presumably it is free
	 because this is the (empty-bodied) thunk prologue — TODO confirm
	 against the Nios II call-clobbered register set.  */
      tmp = gen_rtx_REG (Pmode, 2);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
      nios2_emit_add_constant (tmp, vcall_offset);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (gen_add2_insn (this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Function address needs to be constructed under PIC,
     provide r2 to use here.  */
  nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
  insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}


/* Initialize the GCC target structure.
 */

/* Assembly output and data placement hooks.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags

/* Builtins and library functions.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS nios2_init_libfuncs

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

/* Calling conventions: argument passing and return values.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

/* Addressing, constants, and costs.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

/* TLS support (Linux ABI only).  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

/* Operand printing and asm output.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

/* Per-function target-option machinery (attribute((target)),
   #pragma GCC target); see the corresponding functions above.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

/* Thunk emission for C++ virtual calls.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

/* The one and only target vector for this back end.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-nios2.h"