/* Target definitions for the MorphoRISC1
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "basic-block.h"

/* Frame pointer register mask.  */
#define FP_MASK			(1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK		(1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
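
/* For example, with 4-byte words (UNITS_PER_WORD == 4),
   ROUND_ADVANCE (1) through ROUND_ADVANCE (4) all yield one word,
   while ROUND_ADVANCE (5) yields two.  */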

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Nonzero if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
  int has_loops;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* The mt processor doesn't have unsigned compares; we need a library
   call for them.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;


/* Implement TARGET_STRUCT_VALUE_RTX.  */
static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}

/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
	     mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "nop\n\t");
      --mt_nops_required;
    }

  return ptr;
}
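
/* For illustration: if the prescan pass below decided that one nop is
   needed because of a load->arith dependency, the text emitted just
   before the next opcode would look like this (assembly sketch):

	;# need 1 nops because of load->arith dependency delay
	nop
	<the original opcode>  */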

/* Given an insn, return TYPE_BRANCH if it is a branch or call,
   TYPE_STORE or TYPE_LOAD if it is a memory operation, and
   TYPE_ARITH otherwise.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Return true if anything in insn X is (anti,output,true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}


/* Return true if anything in insn X is true dependent on anything in
   insn Y.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return false;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* The following determines the number of nops that need to be
   inserted between the previous instruction and the current
   instruction to avoid pipeline hazards on the mt processor.
   Remember that the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx   insn,
		       rtx * opvec ATTRIBUTE_UNUSED,
		       int   noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instruction.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
	 && (! INSN_P (prev_i)
	     || GET_CODE (PATTERN (prev_i)) == USE
	     || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
	return;
    }

  /* If there isn't one then there is nothing that we need do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delayed branch slots are already taken care of by delayed branch
     scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
	  && TARGET_MS1_64_001)
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "consecutive mem ops";
	}
      /* Fall through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between a load
	 and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
	  && insn_true_dependent_p (prev_i, insn))
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "load->arith dependency delay";
	}
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
	{
	  if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
	    {
	      /* One cycle of delay between an arith
		 instruction and a branch dependent on it.  */
	      mt_nops_required = 1;
	      mt_nop_reasons = "arith->branch dependency delay";
	    }
	  else if (prev_attr == TYPE_LOAD)
	    {
	      /* Two cycles of delay are required
		 between a load and the dependent branch.  */
	      if (TARGET_MS1_64_001)
		mt_nops_required = 2;
	      else
		mt_nops_required = 1;
	      mt_nop_reasons = "load->branch dependency delay";
	    }
	}
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}
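
/* A summary of the hazards handled above (a sketch only; the case
   logic is authoritative):

     previous insn   current insn       nops on ms1-64-001 / others
     load or store   load or store      1 / 0
     load            dependent arith    1 / 1
     arith           dependent branch   1 / 0
     load            dependent branch   2 / 1

   On ms2 no nops are inserted here at all; its constraints are
   handled in machine dependent reorg.  */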

/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  fprintf (stderr, "\ttotal_size       = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size     = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size        = %d\n", info->args_size);
  fprintf (stderr, "\textra_size       = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size         = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size         = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size       = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask         = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp          = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr          = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized      = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ((1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}

/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
	fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
	break;

      case PLUS:
	{
	  rtx reg = 0;
	  rtx offset = 0;
	  rtx arg0 = XEXP (addr, 0);
	  rtx arg1 = XEXP (addr, 1);

	  if (GET_CODE (arg0) == REG)
	    {
	      reg = arg0;
	      offset = arg1;
	      if (GET_CODE (offset) == REG)
		fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
	    }

	  else if (GET_CODE (arg1) == REG)
	    reg = arg1, offset = arg0;
	  else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	    {
	      fprintf (file, "%s, #", reg_names [GPR_R0]);
	      output_addr_const (file, addr);
	      break;
	    }
	  fprintf (file, "%s, #", reg_names [REGNO (reg)]);
	  output_addr_const (file, offset);
	  break;
	}

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
	output_addr_const (file, addr);
	break;

      default:
	fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
	break;
      }
}
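
/* For example (illustrative register names): the address (reg R1)
   prints as "R1, #0"; (plus (reg R1) (const_int 8)) prints as
   "R1, #8"; and a bare symbolic address is printed via
   output_addr_const.  A constant-only PLUS is printed relative to
   R0, which this port uses as a zero value (compare the 'z' code in
   mt_print_operand below).  */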

/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}

/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
	fputs ("\n\tnop", file);
      return;

    case 'H':
      fprintf (file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'L':
      fprintf (file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'N':
      fprintf (file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
	{
	  fputs (reg_names[GPR_R0], file);
	  return;
	}
      /* Fall through.  */

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf (file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address (file, XEXP (x, 0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf (file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}

/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
			 tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
	fputs (" incoming", stderr);

      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " return = %s,",
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}

      if (libname && GET_CODE (libname) == SYMBOL_REF)
	fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
	fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}

/* Compute the slot number to pass an argument in.
   Return the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
			enum machine_mode mode,
			tree type,
			int named ATTRIBUTE_UNUSED,
			int incoming_p ATTRIBUTE_UNUSED,
			int * pregno)
{
  int regbase = FIRST_ARG_REGNUM;
  int slotno  = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}
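
/* For example, with word-sized arguments: the first MT_NUM_ARG_REGS
   arguments get slots 0 .. MT_NUM_ARG_REGS-1 and are passed in
   registers FIRST_ARG_REGNUM + slot; anything beyond that, or
   anything must_pass_in_stack rejects, gets slot -1 and is passed
   on the stack.  */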

/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
		 enum machine_mode mode,
		 tree type,
		 int named,
		 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}

/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
			 enum machine_mode mode,
			 tree type,
			 int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here; it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
	    ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
	    : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
	     *cum, GET_MODE_NAME (mode), named,
	     (*cum) * UNITS_PER_WORD);
}

/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Return the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
		      enum machine_mode mode,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
	     / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
	fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}


/* Implement the TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}

/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}

/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
	       strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}


/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return nonzero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if ((mode) == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}

/* Return the truth value of whether OP can be used as an operand
   where a register or a 16-bit unsigned integer is needed.  */

int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return the truth value of whether OP can be used as an operand
   where a 16-bit integer is needed.  */

int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return the truth value of whether OP is a register or the constant 0.  */

int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return the truth value of whether OP is a constant that requires
   two loads to put in a register.  */

int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return the truth value of whether OP is a constant that requires
   only one load to put in a register.  */

int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}

/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;

static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}

/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
	mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
	mt_cpu = PROCESSOR_MS1_16_002;
      else if (!strcmp (mt_cpu_string, "ms1-16-003"))
	mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
	mt_cpu = PROCESSOR_MS2;
      else
	error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_16_002;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}

/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of the
   first unnamed parameter.  */

static void
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			   enum machine_mode mode ATTRIBUTE_UNUSED,
			   tree type ATTRIBUTE_UNUSED,
			   int *pretend_size, int no_rtl)
{
  int regno;
  int regs = MT_NUM_ARG_REGS - *cum;

  *pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;

  if (no_rtl)
    return;

  for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
    {
      rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
      rtx slot = gen_rtx_PLUS (Pmode,
			       gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
			       GEN_INT (UNITS_PER_WORD * regno));

      emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
    }
}

/* Return the byte offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of
   space needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int           regno;
  unsigned int  total_size;
  unsigned int  var_size;
  unsigned int  args_size;
  unsigned int  pretend_size;
  unsigned int  extra_size;
  unsigned int  reg_size;
  unsigned int  frame_size;
  unsigned int  reg_mask;

  var_size      = size;
  args_size     = current_function_outgoing_args_size;
  pretend_size  = current_function_pretend_args_size;
  extra_size    = FIRST_PARM_OFFSET (0);
  total_size    = extra_size + pretend_size + args_size + var_size;
  reg_size      = 0;
  reg_mask      = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
	{
	  reg_size += UNITS_PER_WORD;
	  reg_mask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
				|| frame_pointer_needed
				|| interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
				|| profile_flag
				|| interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
	      * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.frame_size   = args_size + var_size;
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.reg_mask     = reg_mask;
  current_frame_info.initialized  = reload_completed;

  return total_size;
}
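
/* A sketch of the frame this lays out, from higher to lower addresses
   (derived from the sizes computed above and the save order used by
   mt_emit_save_fp and mt_emit_save_regs below; total_size is rounded
   to a 4-byte multiple):

	<- SP on entry
	saved FP			(if save_fp)
	saved LR			(if save_lr)
	call-saved registers named in reg_mask
	local variables			(var_size)
	outgoing arguments		(args_size)
	<- SP after prologue = entry SP - total_size  */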

/* Emit code to save REG in the stack slot MEM.  STACK_OFFSET is the
   offset from the SP where the save will happen.  This function sets
   the REG_FRAME_RELATED_EXPR note accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
		      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST
	(REG_FRAME_RELATED_EXPR,
	 gen_rtx_SET (VOIDmode,
		      gen_rtx_MEM (SImode,
				   gen_rtx_PLUS (SImode,
						 stack_pointer_rtx,
						 GEN_INT (stack_offset))),
		      reg),
	 REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}


/* Emit code to save the frame pointer in the prologue and to restore
   it in the epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
		 struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_FP),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }
}

/* Emit code to save registers in the prologue and to restore them
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
		   struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is
	 generated in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_LINK),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
	{
	  offset -= UNITS_PER_WORD;
	  stack_offset -= UNITS_PER_WORD;
	  mt_emit_save_restore
	    (direction, gen_rtx_REG (SImode, regno),
	     gen_rtx_MEM (SImode,
			  gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	     stack_offset);
	}
    }
}

/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
	cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute the size of the stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the frame size doesn't fit in a 15-bit signed integer,
     use a scratch register to build a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  */
  /* Make the stack adjustment, using the scratch register if the
     constant is too large to fit as an immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_MINUS (SImode,
							 stack_pointer_rtx,
							 GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  /* Set R9 to point to the old sp if required for access to the
     register save area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}

/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}

/* Generate the epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute the size of the stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the frame size doesn't fit in a 15-bit signed integer,
     use a scratch register to build a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to the old sp if required for access to the
	 register save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore the sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make the stack adjustment, using the scratch register if the
     constant is too large to fit as an immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
	/* Can handle this with a simple add.  */
	insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
				      stack_pointer_rtx,
				      size_rtx));
      else
	/* Scratch reg R9 has the old sp value.  */
	insn = emit_move_insn (stack_pointer_rtx,
			       gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_PLUS (SImode,
							stack_pointer_rtx,
							GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
			   stack_pointer_rtx,
			   cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}


/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}

/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
			       tree   name,
			       tree   args  ATTRIBUTE_UNUSED,
			       int    flags ATTRIBUTE_UNUSED,
			       bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
	       "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,        min, max, decl?, type?, func?, handler  */
  { "interrupt",  0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,         0,   0,   false, false, false, NULL }
};

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}

/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust the compare operator to fake the unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* Do nothing.  */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
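
/* The biasing above works because adding MT_MIN_INT (the most
   negative 32-bit value, i.e. 0x80000000) flips the sign bit, which
   turns an unsigned comparison into the corresponding signed one.
   For example, 1 <u 0xffffffff is true; after biasing it becomes
   0x80000001 <s 0x7fffffff, a negative value compared against a
   positive one, which is also true.  */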

/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
						     loc_ref, pc_rtx)));
}

/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
	 produced by the AND interferes.  Given that two-byte quantities
	 are the only thing we would be able to differentiate anyway,
	 there does not seem to be any point in convoluting the early
	 out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct and volatile flags from the flags in REF.  If REF is
   not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}

/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
			   enum machine_mode mode,
			   rtx x)
{
  if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
	  || (GET_CODE (x) == REG && true_regnum (x) == -1)
	  || (GET_CODE (x) == SUBREG
	      && (GET_CODE (SUBREG_REG (x)) == MEM
		  || (GET_CODE (SUBREG_REG (x)) == REG
		      && true_regnum (SUBREG_REG (x)) == -1))))
	return GENERAL_REGS;
    }

  return NO_REGS;
}

/* Handle the FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and
   LIBCALL_VALUE macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode,
		   tree func_decl ATTRIBUTE_UNUSED)
{
  if ((mode) == DImode || (mode) == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Split a move into two smaller pieces.
   MODE indicates the reduced mode.  OPERANDS[0] is the original
   destination and OPERANDS[1] is the original source.  The new
   destinations are OPERANDS[2] and OPERANDS[4], while the new
   sources are OPERANDS[3] and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
		enum machine_mode omode,
		rtx *operands)
{
  rtx dl, dh;	/* src/dest pieces.  */
  rtx sl, sh;
  int move_high_first = 0;	/* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
	   || GET_CODE (operands[1]) == SUBREG)
	  && true_regnum (operands[0]) <= true_regnum (operands[1]))
	move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
	{
	  dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
	  dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]));
	}
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
	{
	  int r = REGNO (operands[0]);
	  dh = gen_rtx_REG (nmode, r);
	  dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  dh = gen_rtx_SUBREG (nmode, operands[0], 0);
	  dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
	}
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case POST_INC:
	case POST_DEC:
	  gcc_unreachable ();
	default:
	  dl = operand_subword (operands[0],
				GET_MODE_SIZE (nmode) / UNITS_PER_WORD,
				0, omode);
	  dh = operand_subword (operands[0], 0, 0, omode);
	}
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
	{
	  int r = REGNO (operands[1]);

	  sh = gen_rtx_REG (nmode, r);
	  sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  sh = gen_rtx_SUBREG (nmode, operands[1], 0);
	  sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
	}
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	{
	  int vl, vh;

	  switch (nmode)
	    {
	    default:
	      gcc_unreachable ();
	    }

	  sl = GEN_INT (vl);
	  sh = GEN_INT (vh);
	}
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
	{
	case POST_DEC:
	case POST_INC:
	  gcc_unreachable ();
	  break;
	default:
	  sl = operand_subword (operands[1],
				GET_MODE_SIZE (nmode) / UNITS_PER_WORD,
				0, omode);
	  sh = operand_subword (operands[1], 0, 0, omode);

	  /* Check if the DF load is going to clobber the register
	     used for the address, and if so make sure that is going
	     to be the second move.  */
	  if (GET_CODE (dl) == REG
	      && true_regnum (dl)
		 == true_regnum (XEXP (XEXP (sl, 0), 0)))
	    move_high_first = 1;
	}
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}
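
/* For example (hypothetical hard register numbers): splitting the
   DImode move (set (reg:DI 4) (reg:DI 6)) with NMODE == SImode
   yields the high-part move (set (reg:SI 4) (reg:SI 6)) and the
   low-part move (set (reg:SI 5) (reg:SI 7)).  The move_high_first
   test orders the two sub-moves so that an overlapping pair, such as
   a move from (reg:DI 4) to (reg:DI 5), does not clobber a source
   word before it has been read.  */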

/* Implement the TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (((type) != 0
	   && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	       || TREE_ADDRESSABLE (type))));
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

void
mt_add_loop (void)
{
  cfun->machine->has_loops++;
}


/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 4
/* Maximum size of a loop (allows some headroom for delayed branch slot
   filling).  */
#define MAX_LOOP_LENGTH (200 * 4)

/* We need to keep a vector of loops.  */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);

/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* Loop number, for dumps.  */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the dbnz
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the dbnz insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the dbnz insn
     falls into.  */
  basic_block successor;

  /* The dbnz insn.  */
  rtx dbnz;

  /* The initialization insn.  */
  rtx init;

  /* The new initialization instruction.  */
  rtx loop_init;

  /* The new ending instruction.  */
  rtx loop_end;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The nesting depth of the loop.  Set to -1 for a bad loop.  */
  int depth;

  /* The length of the loop.  */
  int length;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Vector of blocks only within the loop (excluding those within
     inner loops).  */
  VEC (basic_block,heap) *blocks;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};

/* Information used during loop detection.  */
typedef struct loop_work GTY(())
{
  /* Basic block to be scanned.  */
  basic_block block;

  /* Loop it will be within.  */
  loop_info loop;
} loop_work;

/* Work list.  */
DEF_VEC_O (loop_work);
DEF_VEC_ALLOC_O (loop_work,heap);

/* Determine the nesting and length of LOOP.  Return false if the loop
   is bad.  */

static bool
mt_loop_nesting (loop_info loop)
{
  loop_info inner;
  unsigned ix;
  int inner_depth = 0;

  if (!loop->depth)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
	{
	  loop->predecessor = EDGE_PRED (loop->head, 0)->src;
	  if (loop->predecessor == loop->tail)
	    /* We wanted the other predecessor.  */
	    loop->predecessor = EDGE_PRED (loop->head, 1)->src;

	  /* We can only place a loop insn on a fall through edge of a
	     single exit block.  */
	  if (EDGE_COUNT (loop->predecessor->succs) != 1
	      || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
	    loop->predecessor = NULL;
	}

      /* Mark this loop as bad for now.  */
      loop->depth = -1;
      if (loop->predecessor)
	{
	  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
	    {
	      if (!inner->depth)
		mt_loop_nesting (inner);

	      if (inner->depth < 0)
		{
		  inner_depth = -1;
		  break;
		}

	      if (inner_depth < inner->depth)
		inner_depth = inner->depth;
	      loop->length += inner->length;
	    }

	  /* Set the proper loop depth, if it was good.  */
	  if (inner_depth >= 0)
	    loop->depth = inner_depth + 1;
	}
    }
  return (loop->depth > 0
	  && loop->predecessor
	  && loop->depth < MAX_LOOP_DEPTH
	  && loop->length < MAX_LOOP_LENGTH);
}

/* Determine the length of block BB.  */

static int
mt_block_length (basic_block bb)
{
  int length = 0;
  rtx insn;

  for (insn = BB_HEAD (bb);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (!INSN_P (insn))
	continue;
      if (CALL_P (insn))
	{
	  /* Calls are not allowed in loops.  */
	  length = MAX_LOOP_LENGTH + 1;
	  break;
	}

      length += get_attr_length (insn);
    }
  return length;
}

/* Scan the blocks of LOOP (and its inferiors), looking for uses of
   REG.  Return true if we find any.  Don't count the loop's dbnz
   insn if it matches DBNZ.  */

static bool
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
{
  unsigned ix;
  loop_info inner;
  basic_block bb;

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (insn == dbnz)
	    continue;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return true;
	}
    }
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    if (mt_scan_loop (inner, reg, NULL_RTX))
      return true;

  return false;
}

/* MS2 has a loop instruction which needs to be placed just before the
   loop.  It indicates the end of the loop and specifies the number of
   loop iterations.  It can be nested with an automatically maintained
   stack of counter and end address registers.  It's an ideal
   candidate for doloop.  Unfortunately, gcc presumes that loops
   always end with an explicit instruction, and the doloop_begin
   instruction is not a flow control instruction so it can be
   scheduled earlier than just before the start of the loop.  To make
   matters worse, the optimization pipeline can duplicate loop exit
   and entrance blocks and fails to track abnormally exiting loops.
   Thus we cannot simply use doloop.

   What we do is emit a dbnz pattern for the doloop optimization, and
   let that be optimized as normal.  Then in machine dependent reorg
   we have to repeat the loop searching algorithm.  We use the
   flow graph to find closed loops ending in a dbnz insn.  We then try
   and convert it to use the loop instruction.  The conditions are,

   * the loop has no abnormal exits, duplicated end conditions or
     duplicated entrance blocks;

   * the loop counter register is only used in the dbnz instruction
     within the loop;

   * we can find the instruction setting the initial value of the loop
     counter;

   * the loop is not executed more than 65535 times (this might be
     changed to 2^32-1, and would therefore allow variable initializers);

   * the loop is not nested more than 4 deep;

   * there are no subroutine calls in the loop.  */
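
/* For example (a sketch), a counted loop such as

	for (i = 256; i != 0; i--)
	  *p++ = *q++;

   is initially emitted with a dbnz decrementing I at the bottom of
   the loop.  This pass then rediscovers that shape in the flow graph
   and, when all of the conditions above hold, replaces it with the
   MS2 loop instruction placed just before the loop head.  */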
1880
static void
mt_reorg_loops (FILE *dump_file)
{
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;
  unsigned dwork = 0;
  VEC (loop_work,heap) *works = VEC_alloc (loop_work,heap,20);
  loop_work *work;
  edge e;
  edge_iterator ei;
  bool replaced = false;

  /* Find all the possible loop tails.  This means searching for every
     dbnz instruction.  For each one found, create a loop_info
     structure and add the head block to the work list.  */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;
      if (recog_memoized (tail) == CODE_FOR_decrement_and_branch_until_zero)
	{
	  /* A possible loop end.  */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->tail = bb;
	  loop->head = BRANCH_EDGE (bb)->dest;
	  loop->successor = FALLTHRU_EDGE (bb)->dest;
	  loop->predecessor = NULL;
	  loop->dbnz = tail;
	  loop->depth = 0;
	  loop->length = mt_block_length (bb);
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  VEC_quick_push (basic_block, loop->blocks, bb);
	  loop->loops = NULL;
	  loop->loop_no = nloops++;

	  loop->init = loop->end_label = NULL_RTX;
	  loop->loop_init = loop->loop_end = NULL_RTX;

	  work = VEC_safe_push (loop_work, heap, works, NULL);
	  work->block = loop->head;
	  work->loop = loop;

	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }
	}
    }

  /* Now find all the closed loops.
     Until the work list is empty:
       if the block's aux pointer is set
         if it points to a different loop
	   if the block is not that other loop's head
	     mark this loop as bad
	   else
	     record that other loop as nested and continue from its
	     fall-through successor
       else if the block is the exit block
         mark this loop as bad
       else
	 set the aux pointer and add each successor of the block to
	 the work list.  */
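  /* For example, if the scan for loop A reaches the head block of an
     already-discovered loop B, B is recorded as nested within A and
     the scan resumes at B's fall-through successor rather than
     walking B's body again.  */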
  while (VEC_iterate (loop_work, works, dwork++, work))
    {
      loop = work->loop;
      bb = work->block;
      if (bb == EXIT_BLOCK_PTR)
	/* We've reached the exit block.  The loop must be bad.  */
	loop->depth = -1;
      else if (!bb->aux)
	{
	  /* We've not seen this block before.  Add it to the loop's
	     list and then add each successor to the work list.  */
	  bb->aux = loop;
	  loop->length += mt_block_length (bb);
	  VEC_safe_push (basic_block, heap, loop->blocks, bb);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!VEC_space (loop_work, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (loop_work, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (loop_work, heap, works, 1);
		}
	      work = VEC_quick_push (loop_work, works, NULL);
	      work->block = EDGE_SUCC (bb, ei.index)->dest;
	      work->loop = loop;
	    }
	}
      else if (bb->aux != loop)
	{
	  /* We've seen this block in a different loop.  If it's not
	     the other loop's head, then this loop must be bad.
	     Otherwise, the other loop might be a nested loop, so
	     continue from that loop's successor.  */
	  loop_info other = bb->aux;

	  if (other->head != bb)
	    loop->depth = -1;
	  else
	    {
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	      work = VEC_safe_push (loop_work, heap, works, NULL);
	      work->loop = loop;
	      work->block = other->successor;
	    }
	}
    }
  VEC_free (loop_work, heap, works);

  /* Now optimize the loops.  */
  for (loop = loops; loop; loop = loop->next)
    {
      rtx iter_reg, insn, init_insn;
      rtx init_val, loop_end, loop_init, end_label, head_label;

      if (!mt_loop_nesting (loop))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
	  continue;
	}

      /* Get the loop iteration register.  */
      iter_reg = SET_DEST (XVECEXP (PATTERN (loop->dbnz), 0, 1));

      if (!REG_P (iter_reg))
	{
	  /* Spilled.  */
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has spilled iteration count\n",
		     loop->loop_no);
	  continue;
	}

      /* Look for the initializing insn.  */
      init_insn = NULL_RTX;
      for (insn = BB_END (loop->predecessor);
	   insn != PREV_INSN (BB_HEAD (loop->predecessor));
	   insn = PREV_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    {
	      rtx set = single_set (insn);

	      if (set && rtx_equal_p (iter_reg, SET_DEST (set)))
		init_insn = insn;
	      break;
	    }
	}

      if (!init_insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has no initializer\n",
		     loop->loop_no);
	  continue;
	}
      if (dump_file)
	{
	  fprintf (dump_file, ";; loop %d initialized by\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, init_insn);
	}

      init_val = PATTERN (init_insn);
      if (GET_CODE (init_val) == SET)
	init_val = SET_SRC (init_val);
      if (GET_CODE (init_val) != CONST_INT || INTVAL (init_val) >= 65535)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has complex initializer\n",
		     loop->loop_no);
	  continue;
	}

      /* Scan all the blocks to make sure they don't use iter_reg.  */
      if (mt_scan_loop (loop, iter_reg, loop->dbnz))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d uses iterator\n",
		     loop->loop_no);
	  continue;
	}

      /* The loop is good for replacement.  */

      /* The loop instruction is 1 based, dbnz is zero based, so bias
	 the initial value by one.  */
      init_val = GEN_INT (INTVAL (init_val) + 1);

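      /* Counter registers form a per-nesting-level stack; depth is 1
	 based, so a loop at depth D uses register LOOP_FIRST + D - 1.  */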
      iter_reg = gen_rtx_REG (SImode, LOOP_FIRST + loop->depth - 1);
      end_label = gen_label_rtx ();
      head_label = XEXP (SET_SRC (XVECEXP (PATTERN (loop->dbnz), 0, 0)), 1);
      loop_end = gen_loop_end (iter_reg, head_label);
      loop_init = gen_loop_init (iter_reg, init_val, end_label);
      loop->init = init_insn;
      loop->end_label = end_label;
      loop->loop_init = loop_init;
      loop->loop_end = loop_end;
      replaced = true;

      if (dump_file)
	{
	  fprintf (dump_file, ";; replacing loop %d initializer with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_init);
	  fprintf (dump_file, ";; replacing loop %d terminator with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_end);
	}
    }

  /* Now apply the optimizations.  Do it this way so we don't mess up
     the flow graph half way through.  */
  for (loop = loops; loop; loop = loop->next)
    if (loop->loop_init)
      {
	emit_jump_insn_after (loop->loop_init, BB_END (loop->predecessor));
	delete_insn (loop->init);
	emit_label_before (loop->end_label, loop->dbnz);
	emit_jump_insn_before (loop->loop_end, loop->dbnz);
	delete_insn (loop->dbnz);
      }

  /* Free up the loop structures.  */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      XDELETE (loop);
    }

  if (replaced && dump_file)
    {
      fprintf (dump_file, ";; Replaced loops\n");
      print_rtl (dump_file, get_insns ());
    }
}

/* Structures to hold branch information during reorg.  */
typedef struct branch_info
{
  rtx insn;  /* The branch insn.  */

  struct branch_info *next;
} branch_info;

typedef struct label_info
{
  rtx label;  /* The label.  */
  branch_info *branches;  /* Branches to this label.  */
  struct label_info *next;
} label_info;

/* Chain of labels found in the current function, used during reorg.  */
static label_info *mt_labels;

/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
	if (info->label == label)
	  break;

      if (!info)
	{
	  info = xmalloc (sizeof (*info));
	  info->next = mt_labels;
	  mt_labels = info;

	  info->label = label;
	  info->branches = NULL;
	}

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}

/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */
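
/* Purely as an illustration (the mnemonics below are a sketch, not
   real output): suppose a branch to TARGET carries a filled delay
   slot whose insn sets a register the call at TARGET reads,

	jmp	target
	 add	r2, r2, 1	; delay slot, sets r2
	...
   target:
	jal	r2		; call reads r2

   Undoing the fill re-emits the add ahead of the jmp, so no nops are
   needed before the call on this path.  If instead the jmp itself
   read r2 (anti dependent on its own slot), the slot could not be
   unfilled, and the caller inserts nops at TARGET instead.  */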

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent.  */
    return branch;

  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}

/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   Only arith insns are placed in delay slots, so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */
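
/* For instance (the mnemonics here are only a sketch), constraint #1
   means that

	ldw	r1, [r2]	; load into r1
	add	r3, r1, r4	; uses r1 on the next cycle

   needs one nop between the two insns, and constraint #2 means that

	mov	r9, r10
	jal	r9		; call depends on r9

   needs two nops between the mov and the jal.  */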

static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
	continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
	/* If it hasn't got a filled delay slot, it can't
	   conflict.  */
	continue;

      jmp = XVECEXP (jmp, 0, 0);

      if (recog_memoized (jmp) == CODE_FOR_tablejump)
	for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
	     !JUMP_TABLE_DATA_P (jmp);
	     jmp = NEXT_INSN (jmp))
	  continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      for (next = NEXT_INSN (insn);
	   next;
	   next = NEXT_INSN (next))
	{
	  if (!INSN_P (next))
	    continue;
	  if (GET_CODE (PATTERN (next)) != USE)
	    break;
	}

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
	{
	  /* A load.  See if NEXT is dependent, and if so insert a
	     nop.  */

	  tmp = PATTERN (next);
	  if (GET_CODE (tmp) == SEQUENCE)
	    tmp = PATTERN (XVECEXP (tmp, 0, 0));
	  note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
	  if (!tmp)
	    emit_insn_after (gen_nop (), insn);
	}

      if (attr == TYPE_CALL)
	{
	  /* A call.  Make sure we're not dependent on either of the
	     previous two dynamic instructions.  */
	  int nops = 0;
	  int count;
	  rtx prev = insn;
	  rtx rescan = NULL_RTX;

	  for (count = 2; count && !nops;)
	    {
	      int type;

	      prev = PREV_INSN (prev);
	      if (!prev)
		{
		  /* If we reach the start of the function, we must
		     presume the caller set the address in the delay
		     slot of the call instruction.  */
		  nops = count;
		  break;
		}

	      if (BARRIER_P (prev))
		break;
	      if (LABEL_P (prev))
		{
		  /* Look at branches to this label.  */
		  label_info *label;
		  branch_info *branch;

		  for (label = mt_labels;
		       label;
		       label = label->next)
		    if (label->label == prev)
		      {
			for (branch = label->branches;
			     branch;
			     branch = branch->next)
			  {
			    tmp = mt_check_delay_slot (branch->insn, jmp);

			    if (tmp == branch->insn)
			      {
				nops = count;
				break;
			      }

			    if (tmp && branch->insn == next)
			      rescan = tmp;
			  }
			break;
		      }
		  continue;
		}
	      if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
		continue;

	      if (GET_CODE (PATTERN (prev)) == SEQUENCE)
		{
		  /* Look at the delay slot.  */
		  tmp = mt_check_delay_slot (prev, jmp);
		  if (tmp == prev)
		    nops = count;
		  break;
		}

	      type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
		      : TYPE_COMPLEX);
	      if (type == TYPE_CALL || type == TYPE_BRANCH)
		break;

	      if (type == TYPE_LOAD
		  || type == TYPE_ARITH
		  || type == TYPE_COMPLEX)
		{
		  tmp = PATTERN (jmp);
		  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
		  if (!tmp)
		    {
		      nops = count;
		      break;
		    }
		}

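	      /* Only recognized machine insns occupy one of the two
		 dynamic slots being checked.  */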
	      if (INSN_CODE (prev) >= 0)
		count--;
	    }

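	  /* If NEXT was a delay slot SEQUENCE that we just unfilled,
	     recompute NEXT from the re-emitted jump insn.  */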
	  if (rescan)
	    for (next = NEXT_INSN (rescan);
		 next && !INSN_P (next);
		 next = NEXT_INSN (next))
	      continue;
	  while (nops--)
	    emit_insn_before (gen_nop (), insn);
	}
    }

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
	{
	  next = branch->next;
	  free (branch);
	}
      free (label);
    }
}

/* Fix up the looping instructions, do delayed branch scheduling, and
   fix up scheduling hazards.  */

static void
mt_machine_reorg (void)
{
  if (cfun->machine->has_loops && TARGET_MS2)
    mt_reorg_loops (dump_file);

  if (mt_flag_delayed_branch)
    dbr_schedule (get_insns ());

  if (TARGET_MS2)
    {
      /* Force all instructions to be split into their final form.  */
      split_all_insns_noflow ();
      mt_reorg_hazard ();
    }
}

/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mt_attribute_table
#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		mt_struct_value_rtx
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES	hook_bool_tree_true
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE	mt_pass_by_reference
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK	mt_pass_in_stack
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES	mt_arg_partial_bytes
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mt_setup_incoming_varargs
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG	mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mt.h"