/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "toplev.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "df.h"

struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* Number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* Number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* Number of bytes we pretend the caller pushed.  */
  HOST_WIDE_INT args_size;	/* Number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* Number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* Mask of saved registers.  */
};

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
					 enum machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
			    bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool lm32_legitimate_address_p (enum machine_mode mode, rtx x,
				       bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

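/* Initialize the GCC target structure.  */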
struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;

/* Return nonzero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}

/* Generate and emit a word-sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}

/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  enum machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* The instruction set doesn't support LE, LT, LEU or LTU, so swap the
     operands and use GE, GT, GEU or GTU instead.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
      }
      break;
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (VOIDmode, pc_rtx,
			  gen_rtx_IF_THEN_ELSE (VOIDmode,
						cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have CONST_INTs in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against a constant that is not in the legal
	 range, move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}

/* Perform the comparison in OPERANDS[1], whose operands are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}

/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}

/* Generate and emit RTL to save or restore callee save registers.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* Only prologue instructions that set the stack pointer or frame
	     pointer, or save a register, should be marked as frame
	     related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}

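/* Adjust the stack pointer by AMOUNT bytes, using a temporary register
   when AMOUNT does not fit in an add immediate, and mark the
   stack-allocating instructions as frame related.  */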
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32768, 32767))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
		       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
}


/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Allocate space on the stack for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Set up the frame pointer if it is needed.  */
      if (frame_pointer_needed)
	{
	  /* Load the offset.  Don't use total_size, as that includes
	     pretend_size, which isn't part of this frame.  */
	  insn =
	    emit_move_insn (frame_pointer_rtx,
			    GEN_INT (current_frame_info.args_size +
				     current_frame_info.callee_size +
				     current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add in sp.  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}

/* Compute the frame layout for the current function and return the
   total size of the frame in bytes.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build the mask that determines which registers we save, and
     calculate the size required to store them on the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}

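/* Output to FILE the assembler syntax for instruction operand OP.
   LETTER is the optional operand-modifier letter.  */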
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("Only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e  ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g  ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l  ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}

/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (XEXP (addr, 0));
      break;

    case PLUS:
      {
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
	  {
	    if (GET_CODE (arg1) == CONST_INT)
	      fprintf (file, "(%s+" HOST_WIDE_INT_PRINT_DEC ")",
		       reg_names[REGNO (arg0)], INTVAL (arg1));
	    else
	      {
		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
		output_addr_const (file, arg1);
		fprintf (file, ")");
	      }
	  }
	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	  output_addr_const (file, addr);
	else
	  fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
	{
	  fprintf (file, "gp(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
lm32_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
		   tree type, int named)
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, cum + LM32_FIRST_ARG_REG);
}

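/* Return the offset between the registers FROM and TO for register
   elimination.  Only eliminations of the argument pointer into the
   frame or stack pointer are supported.  */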
HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case FRAME_POINTER_REGNUM:
	  offset = 0;
	  break;
	case STACK_POINTER_REGNUM:
	  offset =
	    lm32_compute_frame_size (get_frame_size ()) -
	    current_frame_info.pretend_size;
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}

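/* Implement TARGET_SETUP_INCOMING_VARARGS: dump the anonymous argument
   registers to a block on the stack so that a va_list can find them,
   and record the space used in *PRETEND_SIZE.  */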
static void
lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;
  tree fntype;
  int stdarg_p;

  fntype = TREE_TYPE (current_function_decl);
  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		  != void_type_node));

  if (stdarg_p)
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case.  We have been passed details set up
	 for the last named argument, and we want to skip over any
	 registers used to pass that named parameter in order to
	 determine which register is the first one used to pass
	 anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}

/* Override command line options.  */
void
lm32_override_options (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}

/* Support function to determine the return address of the function
   COUNT frames back up the stack.  */
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
	r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
	{
	  r = gen_rtx_MEM (Pmode,
			   gen_rtx_PLUS (Pmode, frame,
					 GEN_INT (-2 * UNITS_PER_WORD)));
	  set_mem_alias_set (r, get_frame_alias_set ());
	}
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
		       gen_rtx_PLUS (Pmode, frame,
				     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
	return true;
    }

  return false;
}

/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  enum machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = alloca (sizeof (rtx) * length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
			      INTVAL (operands[3]));
      return 1;
    }
  return 0;
}

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
{
  enum machine_mode mode = GET_MODE (x);
  bool small_mode;

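  /* Estimated latencies of the various operation classes, and the
     instruction-count cost assumed for a libcall; these weight the
     COSTS_N_INSNS values used below.  */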
  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single
     instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
	*total =
	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (compare_latency);
	}
      else
	{
	  /* FIXME: Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (shift_latency);
	}
      else if (TARGET_BARREL_SHIFT_ENABLED)
	{
	  /* FIXME: Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
	}
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (multiply_latency);
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    {
	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
		{
		  int cycles = 0;
		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

		  while (i)
		    {
		      i >>= 2;
		      cycles++;
		    }
		  /* Add the cost of loading the divisor: roughly one
		     instruction if the constant fits in 16 bits, two
		     otherwise.  */
		  if (IN_RANGE (INTVAL (XEXP (x, 1)), 0, 65536))
		    *total = COSTS_N_INSNS (1 + 1 + cycles);
		  else
		    *total = COSTS_N_INSNS (2 + 1 + cycles);
		  return true;
		}
	      else if (GET_CODE (XEXP (x, 1)) == REG)
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return true;
		}
	      else
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return false;
		}
	    }
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
	*total = COSTS_N_INSNS (0);
      else if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (arithmetic_latency);
	}
      else
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
	switch (outer_code)
	  {
	  case HIGH:
	  case LO_SUM:
	    *total = COSTS_N_INSNS (0);
	    return true;

	  case AND:
	  case XOR:
	  case IOR:
	  case ASHIFT:
	  case ASHIFTRT:
	  case LSHIFTRT:
	  case ROTATE:
	  case ROTATERT:
	    if (satisfies_constraint_L (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case SET:
	  case PLUS:
	  case MINUS:
	  case COMPARE:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case MULT:
	    if (TARGET_MULTIPLY_ENABLED)
	      {
		if (satisfies_constraint_K (x))
		  *total = COSTS_N_INSNS (0);
		else
		  *total = COSTS_N_INSNS (2);
		return true;
	      }
	    /* Fall through.  */

	  default:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (1);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;
	  }
      }

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
	{
	case HIGH:
	case LO_SUM:
	  *total = COSTS_N_INSNS (0);
	  return true;

	case MEM:
	case SET:
	  if (g_switch_value)
	    {
	      *total = COSTS_N_INSNS (0);
	      return true;
	    }
	  break;
	}
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (load_latency);
      break;
    }

  return false;
}

/* Implement TARGET_CAN_ELIMINATE.  */

static bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x,
			   bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM+literal) */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
	  || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP (x, 1)))
    return true;

  /* gp(sym)  */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}

/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}

/* Implement LEGITIMATE_CONSTANT_P.  */

bool
lm32_legitimate_constant_p (rtx x)
{
  /* 32-bit addresses require multiple instructions.  */
  if (!flag_pic && reloc_operand (x, GET_MODE (x)))
    return false;

  return true;
}