1/* Definitions for Toshiba Media Processor
2   Copyright (C) 2001-2015 Free Software Foundation, Inc.
3   Contributed by Red Hat, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "wide-int.h"
34#include "inchash.h"
35#include "tree.h"
36#include "fold-const.h"
37#include "varasm.h"
38#include "calls.h"
39#include "stringpool.h"
40#include "stor-layout.h"
41#include "regs.h"
42#include "hard-reg-set.h"
43#include "insn-config.h"
44#include "conditions.h"
45#include "insn-flags.h"
46#include "output.h"
47#include "insn-attr.h"
48#include "flags.h"
49#include "recog.h"
50#include "obstack.h"
51#include "hashtab.h"
52#include "function.h"
53#include "statistics.h"
54#include "real.h"
55#include "fixed-value.h"
56#include "expmed.h"
57#include "dojump.h"
58#include "explow.h"
59#include "emit-rtl.h"
60#include "stmt.h"
61#include "expr.h"
62#include "except.h"
63#include "insn-codes.h"
64#include "optabs.h"
65#include "reload.h"
66#include "tm_p.h"
67#include "ggc.h"
68#include "diagnostic-core.h"
69#include "target.h"
70#include "target-def.h"
71#include "langhooks.h"
72#include "dominance.h"
73#include "cfg.h"
74#include "cfgrtl.h"
75#include "cfganal.h"
76#include "lcm.h"
77#include "cfgbuild.h"
78#include "cfgcleanup.h"
79#include "predict.h"
80#include "basic-block.h"
81#include "df.h"
82#include "hash-table.h"
83#include "tree-ssa-alias.h"
84#include "internal-fn.h"
85#include "gimple-fold.h"
86#include "tree-eh.h"
87#include "gimple-expr.h"
88#include "is-a.h"
89#include "gimple.h"
90#include "gimplify.h"
91#include "opts.h"
92#include "dumpfile.h"
93#include "builtins.h"
94#include "rtl-iter.h"
95
96/* Structure of this file:
97
98 + Command Line Option Support
99 + Pattern support - constraints, predicates, expanders
100 + Reload Support
101 + Costs
102 + Functions to save and restore machine-specific function data.
103 + Frame/Epilog/Prolog Related
104 + Operand Printing
105 + Function args in registers
106 + Handle pipeline hazards
107 + Handle attributes
108 + Trampolines
109 + Machine-dependent Reorg
110 + Builtins.  */
111
112/* Symbol encodings:
113
114   Symbols are encoded as @ <char> . <name> where <char> is one of these:
115
116   b - based
117   t - tiny
118   n - near
119   f - far
120   i - io, near
121   I - io, far
122   c - cb (control bus)  */
123
/* Per-function machine-specific state, allocated by
   mep_init_machine_status and kept alive via GTY.  */
struct GTY(()) machine_function
{
  /* Cached "does this function need a frame pointer" decision.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address.  NOTE(review): the field name
     suggests this holds an EH stack-adjustment rtx rather than a
     return address — confirm against its users.  */
  rtx eh_stack_adjust;

  /* Total size of the register-save area, and the slot offset
     assigned to each saved hard register.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
162
/* True iff rtx X is a hard register in one of the control-register
   classes.  */
#define MEP_CONTROL_REG(x) \
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))

/* Output sections corresponding to the symbol encodings documented
   above (based/tiny, far data and bss, far/small read-only data).  */
static GTY(()) section * based_section;
static GTY(()) section * tinybss_section;
static GTY(()) section * far_section;
static GTY(()) section * farbss_section;
static GTY(()) section * frodata_section;
static GTY(()) section * srodata_section;

/* Text sections: VLIW, VLIW-far, and plain far code.  */
static GTY(()) section * vtext_section;
static GTY(()) section * vftext_section;
static GTY(()) section * ftext_section;
176
177static void mep_set_leaf_registers (int);
178static bool symbol_p (rtx);
179static bool symbolref_p (rtx);
180static void encode_pattern_1 (rtx);
181static void encode_pattern (rtx);
182static bool const_in_range (rtx, int, int);
183static void mep_rewrite_mult (rtx_insn *, rtx);
184static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
185static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
186static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
187static bool move_needs_splitting (rtx, rtx, machine_mode);
188static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
189static bool mep_nongeneral_reg (rtx);
190static bool mep_general_copro_reg (rtx);
191static bool mep_nonregister (rtx);
192static struct machine_function* mep_init_machine_status (void);
193static rtx mep_tp_rtx (void);
194static rtx mep_gp_rtx (void);
195static bool mep_interrupt_p (void);
196static bool mep_disinterrupt_p (void);
197static bool mep_reg_set_p (rtx, rtx);
198static bool mep_reg_set_in_function (int);
199static bool mep_interrupt_saved_reg (int);
200static bool mep_call_saves_register (int);
201static rtx_insn *F (rtx_insn *);
202static void add_constant (int, int, int, int);
203static rtx_insn *maybe_dead_move (rtx, rtx, bool);
204static void mep_reload_pointer (int, const char *);
205static void mep_start_function (FILE *, HOST_WIDE_INT);
206static bool mep_function_ok_for_sibcall (tree, tree);
207static int unique_bit_in (HOST_WIDE_INT);
208static int bit_size_for_clip (HOST_WIDE_INT);
209static int bytesize (const_tree, machine_mode);
210static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
211static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
212static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
213static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
214static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
215static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
216static bool mep_function_attribute_inlinable_p (const_tree);
217static bool mep_can_inline_p (tree, tree);
218static bool mep_lookup_pragma_disinterrupt (const char *);
219static int mep_multiple_address_regions (tree, bool);
220static int mep_attrlist_to_encoding (tree, tree);
221static void mep_insert_attributes (tree, tree *);
222static void mep_encode_section_info (tree, rtx, int);
223static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
224static void mep_unique_section (tree, int);
225static unsigned int mep_section_type_flags (tree, const char *, int);
226static void mep_asm_named_section (const char *, unsigned int, tree);
227static bool mep_mentioned_p (rtx, rtx, int);
228static void mep_reorg_regmove (rtx_insn *);
229static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
230					       bool, bool);
231static void mep_reorg_repeat (rtx_insn *);
232static bool mep_invertable_branch_p (rtx_insn *);
233static void mep_invert_branch (rtx_insn *, rtx_insn *);
234static void mep_reorg_erepeat (rtx_insn *);
235static void mep_jmp_return_reorg (rtx_insn *);
236static void mep_reorg_addcombine (rtx_insn *);
237static void mep_reorg (void);
238static void mep_init_intrinsics (void);
239static void mep_init_builtins (void);
240static void mep_intrinsic_unavailable (int);
241static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
242static bool mep_get_move_insn (int, const struct cgen_insn **);
243static rtx mep_convert_arg (machine_mode, rtx);
244static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
245static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
246static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
247static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
248static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
249static int mep_issue_rate (void);
250static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
251static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
252static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
253static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
254static void mep_bundle_insns (rtx_insn *);
255static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
256static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
257static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
258					tree, int *, int);
259static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
260				   const_tree, bool);
261static rtx mep_function_arg (cumulative_args_t, machine_mode,
262			     const_tree, bool);
263static void mep_function_arg_advance (cumulative_args_t, machine_mode,
264				      const_tree, bool);
265static bool mep_vector_mode_supported_p (machine_mode);
266static rtx  mep_allocate_initial_value (rtx);
267static void mep_asm_init_sections (void);
268static int mep_comp_type_attributes (const_tree, const_tree);
269static bool mep_narrow_volatile_bitfield (void);
270static rtx mep_expand_builtin_saveregs (void);
271static tree mep_build_builtin_va_list (void);
272static void mep_expand_va_start (tree, rtx);
273static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
274static bool mep_can_eliminate (const int, const int);
275static void mep_conditional_register_usage (void);
276static void mep_trampoline_init (rtx, tree, rtx);
277
278#define WANT_GCC_DEFINITIONS
279#include "mep-intrin.h"
280#undef WANT_GCC_DEFINITIONS
281
282
283/* Command Line Option Support.  */
284
/* Per-register leaf-function flags; all entries are kept equal and
   toggled as a unit by mep_set_leaf_registers.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
309
310
311static void
312mep_set_leaf_registers (int enable)
313{
314  int i;
315
316  if (mep_leaf_registers[0] != enable)
317    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
318      mep_leaf_registers[i] = enable;
319}
320
321static void
322mep_conditional_register_usage (void)
323{
324  int i;
325
326  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
327    {
328      fixed_regs[HI_REGNO] = 1;
329      fixed_regs[LO_REGNO] = 1;
330      call_used_regs[HI_REGNO] = 1;
331      call_used_regs[LO_REGNO] = 1;
332    }
333
334  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
335    global_regs[i] = 1;
336}
337
/* Implement TARGET_OPTION_OVERRIDE: process the deferred -mivc2
   option, diagnose unsupported or mutually-exclusive flags, and set
   defaults derived from the -ms/-mm/-ml memory-model options.  */

static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* Make the 32 coprocessor registers (hard regs 48..79)
	       allocatable and caller-saved, except regs 54-55 which
	       stay callee-saved.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Give the coprocessor control registers their IVC2 names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* PIC is not supported on this target; -ms/-mm/-ml and -mtiny= are
     mutually exclusive ways of choosing the tiny-data cutoff.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Derive the tiny-data cutoff from the memory model: -ms puts
     everything in tiny data, -mm and -ml (by default) nothing.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
431
432/* Pattern Support - constraints, predicates, expanders.  */
433
434/* MEP has very few instructions that can refer to the span of
435   addresses used by symbols, so it's common to check for them.  */
436
437static bool
438symbol_p (rtx x)
439{
440  int c = GET_CODE (x);
441
442  return (c == CONST_INT
443	  || c == CONST
444	  || c == SYMBOL_REF);
445}
446
447static bool
448symbolref_p (rtx x)
449{
450  int c;
451
452  if (GET_CODE (x) != MEM)
453    return false;
454
455  c = GET_CODE (XEXP (x, 0));
456  return (c == CONST_INT
457	  || c == CONST
458	  || c == SYMBOL_REF);
459}
460
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */

/* True if R is a general (core) register number, also accepting the
   arg pointer and pseudos when not STRICT.  */
#define GEN_REG(R, STRICT)				\
  (GR_REGNO_P (R)					\
   || (!STRICT						\
       && ((R) == ARG_POINTER_REGNUM			\
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Scratch buffers used by encode_pattern: `pattern' holds the
   encoded shape string, `patternr' the rtx at each position.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* Test the encoded shape of the last rtx passed to encode_pattern.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
472
/* Append a one-character encoding of X (recursing into operands) to
   the global `pattern' buffer and record X in patternr[]:
   r=REG m=MEM +=PLUS L=LO_SUM H=HIGH s=SYMBOL_REF l=LABEL_REF
   i=integer constant u<n>=UNSPEC number n  U=USE  ?=anything else.  */

static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; mark truncation with '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
539
/* Encode the shape of X into the global `pattern' buffer as a
   NUL-terminated string; see encode_pattern_1 for the alphabet.  */

static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
547
/* Return the section-encoding character for X ('b', 't', 'n', 'f' or
   'c' -- see the table at the top of this file), or 0 if X does not
   resolve to an encoded symbol.  The io encodings 'i'/'I' are mapped
   onto near/far respectively.  */

int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers until we reach the underlying symbol, if any.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
589
/* Return the register class of hard register REGNO (presumably the
   REGNO_REG_CLASS implementation).  Single-register classes come
   first; coprocessor registers are matched against the smallest
   user-defined subclass that contains them.  */

int
mep_regno_reg_class (int regno)
{
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0: 			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight core registers are addressable via $tp offsets.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  /* Prefer a subclass of I that also contains REGNO, if any;
	     otherwise I itself is the tightest match.  */
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
639
640static bool
641const_in_range (rtx x, int minv, int maxv)
642{
643  return (GET_CODE (x) == CONST_INT
644	  && INTVAL (x) >= minv
645	  && INTVAL (x) <= maxv);
646}
647
648/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
649   such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2.  If a move
650   is needed, emit it before INSN if INSN is nonnull, otherwise emit it
651   at the end of the insn stream.  */
652
653rtx
654mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
655{
656  if (rtx_equal_p (dest, src1))
657    return src2;
658  else if (rtx_equal_p (dest, src2))
659    return src1;
660  else
661    {
662      if (insn == 0)
663	emit_insn (gen_movsi (copy_rtx (dest), src1));
664      else
665	emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
666      return src2;
667    }
668}
669
670/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
671   Change the last element of PATTERN from (clobber (scratch:SI))
672   to (clobber (reg:SI HI_REGNO)).  */
673
674static void
675mep_rewrite_mult (rtx_insn *insn, rtx pattern)
676{
677  rtx hi_clobber;
678
679  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
680  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
681  PATTERN (insn) = pattern;
682  INSN_CODE (insn) = -1;
683}
684
685/* Subroutine of mep_reuse_lo_p.  Rewrite instruction INSN so that it
686   calculates SRC1 * SRC2 and stores the result in $lo.  Also make it
687   store the result in DEST if nonnull.  */
688
689static void
690mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
691{
692  rtx lo, pattern;
693
694  lo = gen_rtx_REG (SImode, LO_REGNO);
695  if (dest)
696    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
697			   mep_mulr_source (insn, dest, src1, src2));
698  else
699    pattern = gen_mulsi3_lo (lo, src1, src2);
700  mep_rewrite_mult (insn, pattern);
701}
702
703/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3.  First copy
704   SRC3 into $lo, then use either madd or maddr.  The move into $lo will
705   be deleted by a peephole2 if SRC3 is already in $lo.  */
706
707static void
708mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
709{
710  rtx lo, pattern;
711
712  lo = gen_rtx_REG (SImode, LO_REGNO);
713  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
714  if (dest)
715    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
716			    mep_mulr_source (insn, dest, src1, src2),
717			    copy_rtx (lo));
718  else
719    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
720  mep_rewrite_mult (insn, pattern);
721}
722
723/* Return true if $lo has the same value as integer register GPR when
724   instruction INSN is reached.  If necessary, rewrite the instruction
725   that sets $lo so that it uses a proper SET, not a CLOBBER.  LO is an
726   rtx for (reg:SI LO_REGNO).
727
728   This function is intended to be used by the peephole2 pass.  Since
729   that pass goes from the end of a basic block to the beginning, and
730   propagates liveness information on the way, there is no need to
731   update register notes here.
732
733   If GPR_DEAD_P is true on entry, and this function returns true,
734   then the caller will replace _every_ use of GPR in and after INSN
735   with LO.  This means that if the instruction that sets $lo is a
736   mulr- or maddr-type instruction, we can rewrite it to use mul or
737   madd instead.  In combination with the copy progagation pass,
738   this allows us to replace sequences like:
739
740	mov GPR,R1
741	mulr GPR,R2
742
743   with:
744
745	mul R1,R2
746
747   if GPR is no longer used.  */
748
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the start of the basic block, looking
     for the instruction that set GPR (and hence $lo).  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* If GPR dies, the GPR destination can be dropped and
		   only $lo kept (see the commentary above).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store to $lo; $lo equals GPR iff GPR is
	       the insn's register destination.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any intervening write to $lo or GPR, or a volatile insn,
	       invalidates the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between INSN and the multiply means GPR
	       cannot be treated as dead at the multiply.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
802
803/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data.  */
804
805bool
806mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
807{
808  bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
809  extract_insn (insn);
810  return result;
811}
812
813/* Return true if SET can be turned into a post-modify load or store
814   that adds OFFSET to GPR.  In other words, return true if SET can be
815   changed into:
816
817       (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
818
819   It's OK to change SET to an equivalent operation in order to
820   make it match.  */
821
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through an extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on SET is modified in place; all checks must be done
     above this point.  Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
883
884/* Return the effect of frame-related instruction INSN.  */
885
886static rtx
887mep_frame_expr (rtx_insn *insn)
888{
889  rtx note, expr;
890
891  note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
892  expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
893  RTX_FRAME_RELATED_P (expr) = 1;
894  return expr;
895}
896
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
   new pattern in INSN1; INSN2 will be deleted by the caller.  */

static void
mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
{
  rtx expr;

  /* Combine the frame-related effects of both insns into a single
     REG_FRAME_RELATED_EXPR note on INSN1.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the merged insn.  */
  INSN_CODE (insn1) = -1;
}
920
921/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
922   the basic block to see if any previous load or store instruction can
923   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */
924
static bool
mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
{
  rtx_insn *insn;

  /* Walk backwards from SET_INSN to the start of the basic block,
     looking for a load or store that can absorb the REG += OFFSET
     update as a post-modify side effect.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any intervening set, use of REG, or volatile insn makes
	     the transformation unsafe.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
951
952/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data.  */
953
954bool
955mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
956{
957  bool result = mep_use_post_modify_p_1 (insn, reg, offset);
958  extract_insn (insn);
959  return result;
960}
961
962bool
963mep_allow_clip (rtx ux, rtx lx, int s)
964{
965  HOST_WIDE_INT u = INTVAL (ux);
966  HOST_WIDE_INT l = INTVAL (lx);
967  int i;
968
969  if (!TARGET_OPT_CLIP)
970    return false;
971
972  if (s)
973    {
974      for (i = 0; i < 30; i ++)
975	if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
976	    && (l == - ((HOST_WIDE_INT) 1 << i)))
977	  return true;
978    }
979  else
980    {
981      if (l != 0)
982	return false;
983
984      for (i = 0; i < 30; i ++)
985	if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
986	  return true;
987    }
988  return false;
989}
990
991bool
992mep_bit_position_p (rtx x, bool looking_for)
993{
994  if (GET_CODE (x) != CONST_INT)
995    return false;
996  switch ((int) INTVAL(x) & 0xff)
997    {
998    case 0x01: case 0x02: case 0x04: case 0x08:
999    case 0x10: case 0x20: case 0x40: case 0x80:
1000      return looking_for;
1001    case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1002    case 0xef: case 0xdf: case 0xbf: case 0x7f:
1003      return !looking_for;
1004    }
1005  return false;
1006}
1007
/* Return true if moving SRC into DEST must be split into multiple
   instructions: far-section symbols, symbol-plus-offset with an
   offset outside [-65536, 0xffffff], or a destination hard register
   above r7.  */

static bool
move_needs_splitting (rtx dest, rtx src,
		      machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip MEM/CONST wrappers; bail out early if SRC cannot resolve
     to a symbolic address.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1036
/* Return true if the move described by OPERANDS must be split.
   SYMBOLIC is nonzero when operand 1 is a symbolic value; otherwise
   only constant integers outside the single-insn constraint ranges
   (I, J, O, and conditionally K -- see the target's constraint
   definitions) need splitting.  */

bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants can be loaded directly, but only into the low eight
     registers once register allocation has started.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1062
1063/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1064   it to one specific value.  So the insn chosen depends on whether
1065   the source and destination modes match.  */
1066
1067bool
1068mep_vliw_mode_match (rtx tgt)
1069{
1070  bool src_vliw = mep_vliw_function_p (cfun->decl);
1071  bool tgt_vliw = INTVAL (tgt);
1072
1073  return src_vliw == tgt_vliw;
1074}
1075
1076/* Like the above, but also test for near/far mismatches.  */
1077
1078bool
1079mep_vliw_jmp_match (rtx tgt)
1080{
1081  bool src_vliw = mep_vliw_function_p (cfun->decl);
1082  bool tgt_vliw = INTVAL (tgt);
1083
1084  if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1085    return false;
1086
1087  return src_vliw == tgt_vliw;
1088}
1089
1090bool
1091mep_multi_slot (rtx_insn *x)
1092{
1093  return get_attr_slot (x) == SLOT_MULTI;
1094}
1095
1096/* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
1097
1098static bool
1099mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1100{
1101  /* We can't convert symbol values to gp- or tp-rel values after
1102     reload, as reload might have used $gp or $tp for other
1103     purposes.  */
1104  if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1105    {
1106      char e = mep_section_tag (x);
1107      return (e != 't' && e != 'b');
1108    }
1109  return 1;
1110}
1111
/* Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

/* Return true if X is a valid address for a MODE access under the
   register rules selected by STRICT.  Accepted forms: %lo(sym)[reg]
   (only for accesses of at most four bytes), [reg], [reg + simm16],
   [reg + tprel/gprel unspec], a bare symbol for VOIDmode (call)
   addresses, and certain SImode/SFmode constants.  Far-section ('f')
   addresses are always rejected.  */

bool
mep_legitimate_address (machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg]: LO_SUM of a base register and a constant.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base register plus 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base register plus a (possibly offset) relocation unspec.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section symbols cannot be used as addresses directly.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses are validated with VOIDmode; a bare symbol is OK.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized accesses may use absolute constant addresses, as long
     as they are not tp-/gp-relative and any literal value is a
     word-aligned non-negative value below 2^20.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1222
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Try to fix up the address *X
   of a MODE operand by pushing reloads; OPNUM and TYPE_I describe the
   reload.  Return 1 if a reload was pushed here, 0 to let the generic
   reload code handle *X.  */
int
mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* TP-/GP-relative symbols ('t'/'b') are left alone here.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1264
1265int
1266mep_core_address_length (rtx_insn *insn, int opn)
1267{
1268  rtx set = single_set (insn);
1269  rtx mem = XEXP (set, opn);
1270  rtx other = XEXP (set, 1-opn);
1271  rtx addr = XEXP (mem, 0);
1272
1273  if (register_operand (addr, Pmode))
1274    return 2;
1275  if (GET_CODE (addr) == PLUS)
1276    {
1277      rtx addend = XEXP (addr, 1);
1278
1279      gcc_assert (REG_P (XEXP (addr, 0)));
1280
1281      switch (REGNO (XEXP (addr, 0)))
1282	{
1283	case STACK_POINTER_REGNUM:
1284	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1285	      && mep_imm7a4_operand (addend, VOIDmode))
1286	    return 2;
1287	  break;
1288
1289	case 13: /* TP */
1290	  gcc_assert (REG_P (other));
1291
1292	  if (REGNO (other) >= 8)
1293	    break;
1294
1295	  if (GET_CODE (addend) == CONST
1296	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
1297	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1298	    return 2;
1299
1300	  if (GET_CODE (addend) == CONST_INT
1301	      && INTVAL (addend) >= 0
1302	      && INTVAL (addend) <= 127
1303	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1304	    return 2;
1305	  break;
1306	}
1307    }
1308
1309  return 4;
1310}
1311
1312int
1313mep_cop_address_length (rtx_insn *insn, int opn)
1314{
1315  rtx set = single_set (insn);
1316  rtx mem = XEXP (set, opn);
1317  rtx addr = XEXP (mem, 0);
1318
1319  if (GET_CODE (mem) != MEM)
1320    return 2;
1321  if (register_operand (addr, Pmode))
1322    return 2;
1323  if (GET_CODE (addr) == POST_INC)
1324    return 2;
1325
1326  return 4;
1327}
1328
#define DEBUG_EXPAND_MOV 0
/* Expand a move of OPERANDS in MODE, handling the MeP-specific cases:
   tp-/gp-relative ('b'/'t' tag) symbols are rewritten as $tp/$gp plus
   a relocation unspec, control-register memory moves are staged
   through a general register, and far-section ('f') or symbolic
   operands are split into top-half/bottom-half symbol moves.  Return
   true if the move was fully emitted here, false if the caller should
   emit the ordinary move insn.  */
bool
mep_expand_mov (rtx *operands, machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;	/* Set when $tp/$gp can no longer be relied on.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, force mem-to-mem moves through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Double-word moves are handled elsewhere.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* During/after reload, tp-/gp-relative addressing is only safe
	 while the base register still holds its incoming value.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {
	      /* Rewrite the symbolic source as
		 $tp/$gp + (const (unspec tprel/gprel sym) [+ offset]).  */
	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite tp-/gp-relative MEM addresses on either side.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* A control register cannot be moved to/from memory directly;
     stage the value through a general register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols (or non-word symbolic stores) need the
     address in a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* When not post_reload, 'b'/'t' sources were already handled above.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbolic address via its top and bottom halves.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Likewise for a memory access through a symbolic address.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1521
/* Cases where the pattern can't be made to use at all.  */

/* Return true if the mov patterns may handle OPERANDS directly.
   HIGH sources, far-section ('f') operands, and -- before reload --
   tp-/gp-relative ('b'/'t') sources are rejected, and at least one
   operand must be a general register (or a subreg of one).  */
bool
mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1588
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a double-word move into two single-word moves.  On entry,
   operands[0]/operands[1] are the destination and source; on exit,
   operands[2]/operands[3] describe the move to emit first and
   operands[4]/operands[5] the one to emit second (each a dest/src
   pair).  */
void
mep_split_wide_move (rtx *operands, machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    /* A 64-bit coprocessor register is one register; its high
	       word is expressed as a zero_extract of bits 32..63.  */
	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; endianness selects which regno
		   holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  /* If emitting the high move first would clobber inputs of the low
     move (or a zero_extract is involved), emit the low pair first.  */
  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1670
1671/* Emit a setcc instruction in its entirity.  */
1672
1673static bool
1674mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1675{
1676  rtx tmp;
1677
1678  switch (code)
1679    {
1680    case GT:
1681    case GTU:
1682      tmp = op1, op1 = op2, op2 = tmp;
1683      code = swap_condition (code);
1684      /* FALLTHRU */
1685
1686    case LT:
1687    case LTU:
1688      op1 = force_reg (SImode, op1);
1689      emit_insn (gen_rtx_SET (VOIDmode, dest,
1690			      gen_rtx_fmt_ee (code, SImode, op1, op2)));
1691      return true;
1692
1693    case EQ:
1694      if (op2 != const0_rtx)
1695	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1696      mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1697      return true;
1698
1699    case NE:
1700      /* Branchful sequence:
1701		mov dest, 0		16-bit
1702		beq op1, op2, Lover	16-bit (op2 < 16), 32-bit otherwise
1703		mov dest, 1		16-bit
1704
1705	 Branchless sequence:
1706		add3 tmp, op1, -op2	32-bit (or mov + sub)
1707		sltu3 tmp, tmp, 1	16-bit
1708		xor3 dest, tmp, 1	32-bit
1709	*/
1710      if (optimize_size && op2 != const0_rtx)
1711	return false;
1712
1713      if (op2 != const0_rtx)
1714	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1715
1716      op2 = gen_reg_rtx (SImode);
1717      mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1718
1719      emit_insn (gen_rtx_SET (VOIDmode, dest,
1720			      gen_rtx_XOR (SImode, op2, const1_rtx)));
1721      return true;
1722
1723    case LE:
1724      if (GET_CODE (op2) != CONST_INT
1725	  || INTVAL (op2) == 0x7ffffff)
1726	return false;
1727      op2 = GEN_INT (INTVAL (op2) + 1);
1728      return mep_expand_setcc_1 (LT, dest, op1, op2);
1729
1730    case LEU:
1731      if (GET_CODE (op2) != CONST_INT
1732	  || INTVAL (op2) == -1)
1733	return false;
1734      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1735      return mep_expand_setcc_1 (LTU, dest, op1, op2);
1736
1737    case GE:
1738      if (GET_CODE (op2) != CONST_INT
1739	  || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1740	return false;
1741      op2 = GEN_INT (INTVAL (op2) - 1);
1742      return mep_expand_setcc_1 (GT, dest, op1, op2);
1743
1744    case GEU:
1745      if (GET_CODE (op2) != CONST_INT
1746	  || op2 == const0_rtx)
1747	return false;
1748      op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1749      return mep_expand_setcc_1 (GTU, dest, op1, op2);
1750
1751    default:
1752      gcc_unreachable ();
1753    }
1754}
1755
1756bool
1757mep_expand_setcc (rtx *operands)
1758{
1759  rtx dest = operands[0];
1760  enum rtx_code code = GET_CODE (operands[1]);
1761  rtx op0 = operands[2];
1762  rtx op1 = operands[3];
1763
1764  return mep_expand_setcc_1 (code, dest, op0, op1);
1765}
1766
/* Expand a conditional-branch comparison: operands[0] is the
   comparison rtx, operands[1] and operands[2] its arguments.  Codes
   other than EQ/NE (or LT/GE against small immediates) are reduced to
   a materialized setcc result compared against zero.  Returns the
   comparison rtx for the branch pattern to use.

   NOTE(review): the gcc_assert calls below wrap mep_expand_setcc_1
   calls whose emitted insns are needed side effects; this relies on
   gcc_assert evaluating its argument, which does not hold when GCC is
   configured with assert-checking disabled -- confirm whether that
   configuration matters here.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Small immediates are handled by the branch patterns directly.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* a >= b is !(a < b).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= C is a < C+1 and a > C is a >= C+1, valid unless C is
	 INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise swap: a <= b is !(b < a).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* a <u 1 is a == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try a direct <=u setcc; otherwise use a <=u b == !(b <u a).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try a direct >=u setcc; otherwise a >=u b == !(a <u b).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1879
1880const char *
1881mep_emit_cbranch (rtx *operands, int ne)
1882{
1883  if (GET_CODE (operands[1]) == REG)
1884    return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1885  else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1886    return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1887  else
1888    return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1889}
1890
/* Expand a call.  OPERANDS is the expander's operand vector;
   RETURNS_VALUE is 1 when operand 0 receives the return value and the
   call address is operand 1, else the address is operand 0.  The
   emitted call insn also carries $tp and $gp.  */
void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  /* Force unsupported address forms into a register.  */
  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* Default the operand after the argument-size operand to zero when
     absent (presumably an optional flags operand of the call
     patterns -- confirm against call_internal in the .md file).  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1915
/* Aliasing Support.  */

/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* The base must be $tp or $gp with a tracked incoming value.  Note
     the short-circuit: mep_tp_rtx/mep_gp_rtx are only called when the
     initial value already exists (they presumably can create it --
     confirm before reordering these tests).  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  /* The displacement must be the matching tprel/gprel unspec.  */
  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The wrapped symbol is the unspec's sole operand.  */
  return XVECEXP (term, 0, 0);
}
1952
1953/* Reload Support.  */
1954
1955/* Return true if the registers in CLASS cannot represent the change from
1956   modes FROM to TO.  */
1957
1958bool
1959mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1960			       enum reg_class regclass)
1961{
1962  if (from == to)
1963    return false;
1964
1965  /* 64-bit COP regs must remain 64-bit COP regs.  */
1966  if (TARGET_64BIT_CR_REGS
1967      && (regclass == CR_REGS
1968	  || regclass == LOADABLE_CR_REGS)
1969      && (GET_MODE_SIZE (to) < 8
1970	  || GET_MODE_SIZE (from) < 8))
1971    return true;
1972
1973  return false;
1974}
1975
1976#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1977
1978static bool
1979mep_general_reg (rtx x)
1980{
1981  while (GET_CODE (x) == SUBREG)
1982    x = XEXP (x, 0);
1983  return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1984}
1985
1986static bool
1987mep_nongeneral_reg (rtx x)
1988{
1989  while (GET_CODE (x) == SUBREG)
1990    x = XEXP (x, 0);
1991  return (GET_CODE (x) == REG
1992	  && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1993}
1994
1995static bool
1996mep_general_copro_reg (rtx x)
1997{
1998  while (GET_CODE (x) == SUBREG)
1999    x = XEXP (x, 0);
2000  return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2001}
2002
2003static bool
2004mep_nonregister (rtx x)
2005{
2006  while (GET_CODE (x) == SUBREG)
2007    x = XEXP (x, 0);
2008  return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2009}
2010
2011#define DEBUG_RELOAD 0
2012
2013/* Return the secondary reload class needed for moving value X to or
2014   from a register in coprocessor register class CLASS.  */
2015
2016static enum reg_class
2017mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2018{
2019  if (mep_general_reg (x))
2020    /* We can do the move directly if mep_have_core_copro_moves_p,
2021       otherwise we need to go through memory.  Either way, no secondary
2022       register is needed.  */
2023    return NO_REGS;
2024
2025  if (mep_general_copro_reg (x))
2026    {
2027      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
2028      if (mep_have_copro_copro_moves_p)
2029	return NO_REGS;
2030
2031      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
2032      if (mep_have_core_copro_moves_p)
2033	return GENERAL_REGS;
2034
2035      /* Otherwise we need to do it through memory.  No secondary
2036	 register is needed.  */
2037      return NO_REGS;
2038    }
2039
2040  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2041      && constraint_satisfied_p (x, CONSTRAINT_U))
2042    /* X is a memory value that we can access directly.  */
2043    return NO_REGS;
2044
2045  /* We have to move X into a GPR first and then copy it to
2046     the coprocessor register.  The move from the GPR to the
2047     coprocessor might be done directly or through memory,
2048     depending on mep_have_core_copro_moves_p. */
2049  return GENERAL_REGS;
2050}
2051
2052/* Copying X to register in RCLASS.  */
2053
2054enum reg_class
2055mep_secondary_input_reload_class (enum reg_class rclass,
2056				  machine_mode mode ATTRIBUTE_UNUSED,
2057				  rtx x)
2058{
2059  int rv = NO_REGS;
2060
2061#if DEBUG_RELOAD
2062  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2063  debug_rtx (x);
2064#endif
2065
2066  if (reg_class_subset_p (rclass, CR_REGS))
2067    rv = mep_secondary_copro_reload_class (rclass, x);
2068  else if (MEP_NONGENERAL_CLASS (rclass)
2069	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2070    rv = GENERAL_REGS;
2071
2072#if DEBUG_RELOAD
2073  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2074#endif
2075  return (enum reg_class) rv;
2076}
2077
2078/* Copying register in RCLASS to X.  */
2079
2080enum reg_class
2081mep_secondary_output_reload_class (enum reg_class rclass,
2082				   machine_mode mode ATTRIBUTE_UNUSED,
2083				   rtx x)
2084{
2085  int rv = NO_REGS;
2086
2087#if DEBUG_RELOAD
2088  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2089  debug_rtx (x);
2090#endif
2091
2092  if (reg_class_subset_p (rclass, CR_REGS))
2093    rv = mep_secondary_copro_reload_class (rclass, x);
2094  else if (MEP_NONGENERAL_CLASS (rclass)
2095	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2096    rv = GENERAL_REGS;
2097
2098#if DEBUG_RELOAD
2099  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2100#endif
2101
2102  return (enum reg_class) rv;
2103}
2104
2105/* Implement SECONDARY_MEMORY_NEEDED.  */
2106
2107bool
2108mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2109			     machine_mode mode ATTRIBUTE_UNUSED)
2110{
2111  if (!mep_have_core_copro_moves_p)
2112    {
2113      if (reg_classes_intersect_p (rclass1, CR_REGS)
2114	  && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2115	return true;
2116      if (reg_classes_intersect_p (rclass2, CR_REGS)
2117	  && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2118	return true;
2119      if (!mep_have_copro_copro_moves_p
2120	  && reg_classes_intersect_p (rclass1, CR_REGS)
2121	  && reg_classes_intersect_p (rclass2, CR_REGS))
2122	return true;
2123    }
2124  return false;
2125}
2126
/* Expand a secondary-reload move: operands[0] and operands[1] are the
   destination and source, operands[2] the scratch register supplied
   by the reload pattern.  */
void
mep_expand_reload (rtx *operands, machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  /* WHICH packs two decimal digits: the tens digit classifies the
     destination (2 = far, 1 = non-general reg, 0 = other) and the
     units digit the source.  (The 00/01/02 case labels below are
     octal literals, but octal equals decimal for single digits, so
     they match as intended.)  */
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the scratch register.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2177
2178/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
2179   can be moved directly into registers 0 to 7, but not into the rest.
2180   If so, and if the required class includes registers 0 to 7, restrict
2181   it to those registers.  */
2182
2183enum reg_class
2184mep_preferred_reload_class (rtx x, enum reg_class rclass)
2185{
2186  switch (GET_CODE (x))
2187    {
2188    case CONST_INT:
2189      if (INTVAL (x) >= 0x10000
2190	  && INTVAL (x) < 0x01000000
2191	  && (INTVAL (x) & 0xffff) != 0
2192	  && reg_class_subset_p (TPREL_REGS, rclass))
2193	rclass = TPREL_REGS;
2194      break;
2195
2196    case CONST:
2197    case SYMBOL_REF:
2198    case LABEL_REF:
2199      if (mep_section_tag (x) != 'f'
2200	  && reg_class_subset_p (TPREL_REGS, rclass))
2201	rclass = TPREL_REGS;
2202      break;
2203
2204    default:
2205      break;
2206    }
2207  return rclass;
2208}
2209
2210/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
2211   moves, 4 for direct double-register moves, and 1000 for anything
2212   that requires a temporary register or temporary stack slot.  */
2213
int
mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Coprocessor-to-coprocessor when direct copro-copro moves are
     available: the cheap case.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      /* Double-word values take two moves when CRs are 32 bits wide.  */
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Same subset test as above, but only reached when
     mep_have_copro_copro_moves_p is false: the value must bounce
     through a core register, doubling the cost.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Moves between a coprocessor register and anything else.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything needing a stack slot or a temporary register is
     prohibitively expensive (see function comment above).  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  /* Plain moves: 2 per 32-bit word.  */
  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2249
2250
2251/* Functions to save and restore machine-specific function data.  */
2252
/* Hook for init_machine_status: allocate a zero-initialized,
   GC-tracked machine_function for the function being compiled.  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
2258
/* Suggest a stack slot to hold the entry value of hard register REG
   (TARGET_ALLOCATE_INITIAL_VALUE-style hook).  Assigns REG a slot in
   the register-save area, growing it if necessary, and returns an
   arg-pointer-relative MEM; returns NULL_RTX when no slot applies.  */
static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a 4-byte slot, recorded as an offset into the
     reg-save area.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  /* The slot lives RSS bytes below the incoming argument pointer.  */
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2286
2287rtx
2288mep_return_addr_rtx (int count)
2289{
2290  if (count != 0)
2291    return const0_rtx;
2292
2293  return get_hard_reg_initial_val (Pmode, LP_REGNO);
2294}
2295
/* Return the RTX holding the entry value of $tp (the tiny-data base).  */
static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2301
/* Return the RTX holding the entry value of $gp (the small-data base).  */
static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2307
2308static bool
2309mep_interrupt_p (void)
2310{
2311  if (cfun->machine->interrupt_handler == 0)
2312    {
2313      int interrupt_handler
2314	= (lookup_attribute ("interrupt",
2315			     DECL_ATTRIBUTES (current_function_decl))
2316	   != NULL_TREE);
2317      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2318    }
2319  return cfun->machine->interrupt_handler == 2;
2320}
2321
2322static bool
2323mep_disinterrupt_p (void)
2324{
2325  if (cfun->machine->disable_interrupts == 0)
2326    {
2327      int disable_interrupts
2328	= (lookup_attribute ("disinterrupt",
2329			     DECL_ATTRIBUTES (current_function_decl))
2330	   != NULL_TREE);
2331      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2332    }
2333  return cfun->machine->disable_interrupts == 2;
2334}
2335
2336
2337/* Frame/Epilog/Prolog Related.  */
2338
/* Return true if INSN sets (or auto-modifies) REG.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* An auto-inc/dec of REG counts as a set.  */
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied to itself is a no-op; don't count it.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2358
2359
2360#define MEP_SAVES_UNKNOWN 0
2361#define MEP_SAVES_YES 1
2362#define MEP_SAVES_MAYBE 2
2363#define MEP_SAVES_NO 3
2364
/* Return true if hard register REGNO is (or must be assumed to be)
   modified somewhere in the current function's insn stream.  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg;
  rtx_insn *insn;

  /* In an interrupt handler, any register that is live at all must be
     treated as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code clobbers $lp (see mep_epilogue_uses).  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every insn after the first for a set of REGNO.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2391
2392static bool
2393mep_asm_without_operands_p (void)
2394{
2395  if (cfun->machine->asms_without_operands == 0)
2396    {
2397      rtx_insn *insn;
2398
2399      push_topmost_sequence ();
2400      insn = get_insns ();
2401      pop_topmost_sequence ();
2402
2403      cfun->machine->asms_without_operands = 1;
2404      while (insn)
2405	{
2406	  if (INSN_P (insn)
2407	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2408	    {
2409	      cfun->machine->asms_without_operands = 2;
2410	      break;
2411	    }
2412	  insn = NEXT_INSN (insn);
2413	}
2414
2415    }
2416  return cfun->machine->asms_without_operands == 2;
2417}
2418
2419/* Interrupt functions save/restore every call-preserved register, and
2420   any call-used register it uses (or all if it calls any function,
2421   since they may get clobbered there too).  Here we check to see
2422   which call-used registers need saving.  */
2423
2424#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2425			   && (r == FIRST_CCR_REGNO + 1 \
2426			       || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2427			       || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2428
/* Return true if the current function, being an interrupt handler,
   must save register R (see the comment block above).  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The temp used to shuttle control registers to/from the stack is
     always clobbered by the save/restore code itself.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An asm without operands could use nearly any register; play safe.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler, an untouched call-clobbered register is safe.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2462
/* Return true if the current function must save/restore register R.
   The decision is computed and cached in reg_saved[] while the frame
   layout is still unlocked; once frame_locked is set the cached answer
   is authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A slot was already assigned (e.g. via mep_allocate_initial_value).  */
      if (cfun->machine->reg_save_slot[r])
  	rv = MEP_SAVES_YES;
      /* Profiling code clobbers $lp; see mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      /* Call-preserved registers (and $lp) that the function uses.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2488
2489/* Return true if epilogue uses register REGNO.  */
2490
2491bool
2492mep_epilogue_uses (int regno)
2493{
2494  /* Since $lp is a call-saved register, the generic code will normally
2495     mark it used in the epilogue if it needs to be saved and restored.
2496     However, when profiling is enabled, the profiling code will implicitly
2497     clobber $11.  This case has to be handled specially both here and in
2498     mep_call_saves_register.  */
2499  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2500    return true;
2501  /* Interrupt functions save/restore pretty much everything.  */
2502  return (reload_completed && mep_interrupt_saved_reg (regno));
2503}
2504
2505static int
2506mep_reg_size (int regno)
2507{
2508  if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2509    return 8;
2510  return 4;
2511}
2512
2513/* Worker function for TARGET_CAN_ELIMINATE.  */
2514
2515bool
2516mep_can_eliminate (const int from, const int to)
2517{
2518  return  (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2519           ? ! frame_pointer_needed
2520           : true);
2521}
2522
/* Return the offset between eliminable registers FROM and TO.  As a
   side effect this computes and caches the frame layout in
   cfun->machine: the reg-save area's 8-byte alignment filler and the
   filler that keeps the whole frame 8-byte aligned.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Recompute save decisions unless the layout is frozen.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the reg-save area to a multiple of 8 bytes.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the overall frame to a multiple of 8 bytes as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2567
/* Mark insn X frame-related (for unwind info) and return it;
   a terse helper for the prologue code below.  */
static rtx_insn *
F (rtx_insn *x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2574
2575/* Since the prologue/epilogue code is generated after optimization,
2576   we can't rely on gcc to split constants for us.  So, this code
2577   captures all the ways to add a constant to a register in one logic
2578   chunk, including optimizing away insns we just don't need.  This
2579   makes the prolog/epilog code easier to follow.  */
/* Emit insns computing register DEST = register SRC + VALUE.  If
   MARK_FRAME is nonzero the insns are marked frame-related for unwind
   info.  Large constants are assembled in REGSAVE_CONTROL_TEMP.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  /* Complete no-op: emit nothing.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constant fits a 16-bit signed immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build the constant as (high half) | (low half) in the temp.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* The unwinder can't follow the temp-register dance, so describe
	 the net effect (dest = dest + value) explicitly.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2639
2640/* Move SRC to DEST.  Mark the move as being potentially dead if
2641   MAYBE_DEAD_P.  */
2642
static rtx_insn *
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx_insn *insn = emit_move_insn (dest, src);
#if 0
  /* NOTE(review): the REG_MAYBE_DEAD annotation below is compiled out,
     so MAYBE_DEAD_P currently has no effect on the emitted insn.  */
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2653
2654/* Used for interrupt functions, which can't assume that $tp and $gp
2655   contain the correct pointers.  */
2656
2657static void
2658mep_reload_pointer (int regno, const char *symbol)
2659{
2660  rtx reg, sym;
2661
2662  if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2663    return;
2664
2665  reg = gen_rtx_REG (SImode, regno);
2666  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2667  emit_insn (gen_movsi_topsym_s (reg, sym));
2668  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2669}
2670
2671/* Assign save slots for any register not already saved.  DImode
2672   registers go at the end of the reg save area; the rest go at the
2673   beginning.  This is for alignment purposes.  Returns true if a frame
2674   is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;  /* running offset from the area's top for 8-byte slots */
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* Saving anything other than an untouched $tp/$gp/$lp forces
	   a real stack frame.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	/* Slot already assigned (e.g. via mep_allocate_initial_value).  */
	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte registers fill the area from the bottom up.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte registers are placed from the top down.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From here on the frame layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2708
/* Expand the prologue: allocate the stack frame, store each saved
   register into its slot, set up $fp when needed, and (for interrupt
   handlers) reload $gp/$tp from their well-known symbols.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* First $sp adjustment covers just the save area, unless the whole
     frame fits in short (<128) offsets, in which case do it at once.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  /* Store each saved register into its assigned slot.  */
  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* Outside interrupt handlers, $tp/$gp/$lp need not be stored
	   unless the function actually modifies them.  */
  	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Registers that can be stored directly.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit register that can't be stored directly: move each
	       32-bit half through a core temp register; BE selects
	       which half goes at the lower address.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    /* Extract the high 32 bits into the second temp.  */
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Describe the net effect (whole DImode register stored)
	       for the unwinder.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Other non-loadable registers go through the temp.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Second $sp adjustment for whatever part of the frame remains.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      /* Interrupt handlers cannot trust the caller's $gp/$tp.  */
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2837
/* Emit a human-readable frame-layout commentary at the start of the
   function's assembly output, and pick the printed name for register 8
   ($fp vs $8).  HWI_LOCAL is the local variable size in bytes.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Register 8 doubles as the frame pointer; name it to match.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, print just a one-line frame summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, "   %d regs", reg_save_size);
      if (local)
	fprintf (file, "   %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, "   %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry   ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  /* Simple exchange sort; FIRST_PSEUDO_REGISTER is small and this only
     runs once per function.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Mirror the skip test in mep_expand_prologue: untouched
	 $tp/$gp/$lp weren't actually stored.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t#         %3d bytes for alignment\n", skip);
      fprintf (file, "\t#         %3d bytes for saved %-3s   %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t#         %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t#         %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t#         %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t#         %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2935
2936
2937static int mep_prevent_lp_restore = 0;
2938static int mep_sibcall_epilogue = 0;
2939
/* Expand the epilogue: restore saved registers, deallocate the frame,
   and emit the appropriate return.  Behavior is modified by the
   mep_prevent_lp_restore (eh_return) and mep_sibcall_epilogue flags.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Pop the locals first so the restores get short offsets.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Mirror the prologue: untouched $tp/$gp/$lp weren't stored
	   outside interrupt handlers.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	/* eh_return keeps the $lp set by mep_expand_eh_return.  */
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 hold EH data; don't clobber them on a normal return
	   (see the calls_eh_return case in mep_call_saves_register).  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Non-loadable control registers come back through the
		   core temp register.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* eh_return additionally applies the unwinder-supplied adjustment.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  /* A sibcall epilogue emits no return; the call itself follows.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3061
3062void
3063mep_expand_eh_return (rtx *operands)
3064{
3065  if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3066    {
3067      rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3068      emit_move_insn (ra, operands[0]);
3069      operands[0] = ra;
3070    }
3071
3072  emit_insn (gen_eh_epilogue (operands[0]));
3073}
3074
/* Expand the eh_epilogue pattern: emit a regular epilogue, but leave
   $lp alone (mep_expand_eh_return placed the handler address there)
   and have the epilogue apply the stack adjustment in register 0.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  /* Register 0 carries the unwinder's stack adjustment.  */
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3083
/* Emit the epilogue for a sibling call: restore registers and pop the
   frame, but emit no return jump (mep_expand_epilogue returns early
   when mep_sibcall_epilogue is set).  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3091
3092static bool
3093mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3094{
3095  if (decl == NULL)
3096    return false;
3097
3098  if (mep_section_tag (DECL_RTL (decl)) == 'f')
3099    return false;
3100
3101  /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
3102  if (mep_interrupt_p () || mep_disinterrupt_p ())
3103    return false;
3104
3105  return true;
3106}
3107
/* Return the register used to pass the stack adjustment out of the
   unwinder (hard register 10) — presumably EH_RETURN_STACKADJ_RTX;
   confirm in mep.h.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3113
/* Return the register holding the EH handler address ($lp) —
   presumably EH_RETURN_HANDLER_RTX; confirm in mep.h.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3119
/* Emit the profiling preamble into FILE: save $0 and $lp around a call
   to __mep_mcount, then restore them.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  static const char *const seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n"
  };
  size_t i;

  for (i = 0; i < sizeof (seq) / sizeof (seq[0]); i++)
    fputs (seq[i], file);
}
3135
3136const char *
3137mep_emit_bb_trace_ret (void)
3138{
3139  fprintf (asm_out_file, "\t# end of block profiling\n");
3140  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3141  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3142  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3143  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3144  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3145  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3146  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3147  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3148  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3149  return "";
3150}
3151
3152#undef SAVE
3153#undef RESTORE
3154
3155/* Operand Printing.  */
3156
/* Print ADDRESS as a memory operand address on STREAM.  ADDRESS may be
   the MEM itself (its address is extracted) or already a bare REG.  */
void
mep_print_operand_address (FILE *stream, rtx address)
{
  if (GET_CODE (address) == MEM)
    address = XEXP (address, 0);
  else
    /* cf: gcc.dg/asm-4.c.  */
    gcc_assert (GET_CODE (address) == REG);

  mep_print_operand (stream, address, 0);
}
3168
/* Table driving mep_print_operand: for a given %-code letter and an
   operand whose RTL shape matches PATTERN, print it using FORMAT,
   where digits in FORMAT refer to subexpressions of the match.
   NOTE(review): the pattern letters appear to encode RTL shapes
   (r=register, m=mem, i=const_int, s=symbol, l=label, '+'=plus,
   u2/u3=%tpoff/%sdaoff forms, L/H=lo/hi halves) — confirm against the
   matcher in mep_print_operand before relying on this reading.  */
static struct
{
  char code;		/* %-code letter, or 0 for the default case.  */
  const char *pattern;	/* RTL shape to match.  */
  const char *format;	/* output template.  */
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3214
/* Map a byte value that has exactly one bit set, or exactly one bit
   clear, to a bit index; abort on anything else.  */
static int
unique_bit_in (HOST_WIDE_INT i)
{
  switch (i & 0xff)
    {
    /* For bits 0-3, the value 1<<n and its 8-bit complement both map
       to n.  */
    case 0x01: case 0xfe: return 0;
    case 0x02: case 0xfd: return 1;
    case 0x04: case 0xfb: return 2;
    case 0x08: case 0xf7: return 3;
    /* NOTE(review): for bits 4-7 the complement entries are
       cross-paired with the low-bit pattern above: ~0x10 is 0xef (bit 4
       clear) yet 0xef returns 7 here, and ~0x80 is 0x7f (bit 7 clear)
       yet 0x7f returns 4.  If the intent is "index of the unique set or
       clear bit", these four complement cases look transposed — confirm
       against the %b/%U users in mep_print_operand before changing.  */
    case 0x10: case 0x7f: return 4;
    case 0x20: case 0xbf: return 5;
    case 0x40: case 0xdf: return 6;
    case 0x80: case 0xef: return 7;
    default:
      gcc_unreachable ();
    }
}
3232
3233static int
3234bit_size_for_clip (HOST_WIDE_INT i)
3235{
3236  int rv;
3237
3238  for (rv = 0; rv < 31; rv ++)
3239    if (((HOST_WIDE_INT) 1 << rv) > i)
3240      return rv + 1;
3241  gcc_unreachable ();
3242}
3243
/* Print an operand to an assembler instruction.  X is the operand and
   CODE is the %-letter modifier (0 for a plain %N operand).  The '<',
   'L' and 'M' codes are handled directly; every other operand is
   printed by matching the encode_pattern() form of X against the
   conversions[] table above.  */

void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-operation suffix matching the rtx opcode.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven path: find the first entry whose CODE and encoded
     pattern both match, then interpret its format string.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  /* Backslash escapes the next literal character.  */
	  if (conversions[i].format[j] == '\\')
	    {
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  /* A digit N prints the sub-rtx patternr[N].  */
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      /* Bit number of a one-bit (or one-zero) mask.  */
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      /* Width operand for a clip instruction.  */
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      /* High 16 bits of the constant, in hex.  */
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      /* Low 16 bits of the constant, in hex.  */
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Hex for "round" constants with a clear low
			 byte, decimal otherwise.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      /* Large constants at the end of the format also
			 get the low 16 bits as a hex comment.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Suppress a '+' before a negative constant; its own
		 minus sign acts as the separator.  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3400
3401void
3402mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
3403			int noperands ATTRIBUTE_UNUSED)
3404{
3405  /* Despite the fact that MeP is perfectly capable of branching and
3406     doing something else in the same bundle, gcc does jump
3407     optimization *after* scheduling, so we cannot trust the bundling
3408     flags on jump instructions.  */
3409  if (GET_MODE (insn) == BImode
3410      && get_attr_slots (insn) != SLOTS_CORE)
3411    fputc ('+', asm_out_file);
3412}
3413
3414/* Function args in registers.  */
3415
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Arguments are passed in
   $1..$4, so NSAVE is how many of those registers follow the last
   named argument and must be spilled for va_arg.  */

static void
mep_setup_incoming_varargs (cumulative_args_t cum,
			    machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  /* NOTE(review): when all four registers are taken by named args,
     NSAVE is negative and so is *PRETEND_SIZE -- presumably callers
     treat that the same as zero; confirm.  */
  *pretend_size = nsave * 4;
}
3428
3429static int
3430bytesize (const_tree type, machine_mode mode)
3431{
3432  if (mode == BLKmode)
3433    return int_size_in_bytes (type);
3434  return GET_MODE_SIZE (mode);
3435}
3436
/* Expand __builtin_saveregs: spill the unnamed argument registers to
   a stack buffer and return the buffer's address.  The first part of
   the buffer holds the core registers (4 bytes each, rounded up to a
   multiple of 8 for IVC2); for IVC2 the matching 64-bit coprocessor
   registers are stored after that area.  */

static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core area rounded up to 8 bytes, plus 8 bytes per
	 coprocessor register; 64-bit aligned.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Save the last NS core argument registers ($1..$4).  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* Matching coprocessor argument register ($c49 onward).  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3474
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The MeP va_list is a
   record of four pointers: the next core-register slot, the end of
   the core-register save area, the next coprocessor-register slot
   (used for IVC2 vector arguments), and the next stack argument.  */

static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3508
3509static void
3510mep_expand_va_start (tree valist, rtx nextarg)
3511{
3512  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3513  tree next_gp, next_gp_limit, next_cop, next_stack;
3514  tree t, u;
3515  int ns;
3516
3517  ns = cfun->machine->arg_regs_to_save;
3518
3519  f_next_gp = TYPE_FIELDS (va_list_type_node);
3520  f_next_gp_limit = DECL_CHAIN (f_next_gp);
3521  f_next_cop = DECL_CHAIN (f_next_gp_limit);
3522  f_next_stack = DECL_CHAIN (f_next_cop);
3523
3524  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3525		    NULL_TREE);
3526  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3527			  valist, f_next_gp_limit, NULL_TREE);
3528  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3529		     NULL_TREE);
3530  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3531		       valist, f_next_stack, NULL_TREE);
3532
3533  /* va_list.next_gp = expand_builtin_saveregs (); */
3534  u = make_tree (sizetype, expand_builtin_saveregs ());
3535  u = fold_convert (ptr_type_node, u);
3536  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3537  TREE_SIDE_EFFECTS (t) = 1;
3538  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3539
3540  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3541  u = fold_build_pointer_plus_hwi (u, 4 * ns);
3542  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3543  TREE_SIDE_EFFECTS (t) = 1;
3544  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3545
3546  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3547  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3548  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3549  TREE_SIDE_EFFECTS (t) = 1;
3550  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3551
3552  /* va_list.next_stack = nextarg; */
3553  u = make_tree (ptr_type_node, nextarg);
3554  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3555  TREE_SIDE_EFFECTS (t) = 1;
3556  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3557}
3558
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple that fetches
   the next vararg of TYPE: from the coprocessor save area for IVC2
   vector types, from the core-register save area for other small
   types, or from the stack once next_gp has reached next_gp_limit.
   Values wider than one slot (8 bytes for IVC2 vectors, else 4) are
   passed by reference, so the fetched word is dereferenced again.  */

static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  /* Oversized, incomplete or variable-sized values arrive as
     pointers.  */
  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack arguments are consumed in whole-word units.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node);

  /* Registers exhausted?  Then jump to the stack path.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both pointers advance in lock-step, whichever area was read.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* By-reference values need a second dereference: the slot held a
     pointer to the actual value.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3666
3667void
3668mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3669			  rtx libname ATTRIBUTE_UNUSED,
3670			  tree fndecl ATTRIBUTE_UNUSED)
3671{
3672  pcum->nregs = 0;
3673
3674  if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3675    pcum->vliw = 1;
3676  else
3677    pcum->vliw = 0;
3678}
3679
3680/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack.  Arguments
3681   larger than 4 bytes are passed indirectly.  Return value in 0,
3682   unless bigger than 4 bytes, then the caller passes a pointer as the
3683   first arg.  For varargs, we copy $1..$4 to the stack.  */
3684
3685static rtx
3686mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3687		  const_tree type ATTRIBUTE_UNUSED,
3688		  bool named ATTRIBUTE_UNUSED)
3689{
3690  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3691
3692  /* VOIDmode is a signal for the backend to pass data to the call
3693     expander via the second operand to the call pattern.  We use
3694     this to determine whether to use "jsr" or "jsrv".  */
3695  if (mode == VOIDmode)
3696    return GEN_INT (cum->vliw);
3697
3698  /* If we havn't run out of argument registers, return the next.  */
3699  if (cum->nregs < 4)
3700    {
3701      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3702	return gen_rtx_REG (mode, cum->nregs + 49);
3703      else
3704	return gen_rtx_REG (mode, cum->nregs + 1);
3705    }
3706
3707  /* Otherwise the argument goes on the stack.  */
3708  return NULL_RTX;
3709}
3710
3711static bool
3712mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3713		       machine_mode mode,
3714		       const_tree        type,
3715		       bool              named ATTRIBUTE_UNUSED)
3716{
3717  int size = bytesize (type, mode);
3718
3719  /* This is non-obvious, but yes, large values passed after we've run
3720     out of registers are *still* passed by reference - we put the
3721     address of the parameter on the stack, as well as putting the
3722     parameter itself elsewhere on the stack.  */
3723
3724  if (size <= 0 || size > 8)
3725    return true;
3726  if (size <= 4)
3727    return false;
3728  if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3729      && type != NULL_TREE && VECTOR_TYPE_P (type))
3730    return false;
3731  return true;
3732}
3733
3734static void
3735mep_function_arg_advance (cumulative_args_t pcum,
3736			  machine_mode mode ATTRIBUTE_UNUSED,
3737			  const_tree type ATTRIBUTE_UNUSED,
3738			  bool named ATTRIBUTE_UNUSED)
3739{
3740  get_cumulative_args (pcum)->nregs += 1;
3741}
3742
3743bool
3744mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3745{
3746  int size = bytesize (type, BLKmode);
3747  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3748    return size > 0 && size <= 8 ? 0 : 1;
3749  return size > 0 && size <= 4 ? 0 : 1;
3750}
3751
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always narrow volatile
   bit-field accesses to the field's declared width.  (The unreachable
   "return false;" that followed the first return has been removed.)  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3758
3759/* Implement FUNCTION_VALUE.  All values are returned in $0.  */
3760
3761rtx
3762mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3763{
3764  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3765    return gen_rtx_REG (TYPE_MODE (type), 48);
3766  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3767}
3768
3769/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.  */
3770
3771rtx
3772mep_libcall_value (machine_mode mode)
3773{
3774  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3775}
3776
3777/* Handle pipeline hazards.  */
3778
/* Opcode classes involved in MeP pipeline hazards.  opnames[] is
   indexed by op_num and is used only in the diagnostic comment
   emitted next to the workaround nop.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* op_num classification of the opcode emitted just before this one.  */
static int prev_opcode = 0;
3783
3784/* This isn't as optimal as it could be, because we don't know what
3785   control register the STC opcode is storing in.  We only need to add
3786   the nop if it's the relevant register, but we add it for irrelevant
3787   registers also.  */
3788
3789void
3790mep_asm_output_opcode (FILE *file, const char *ptr)
3791{
3792  int this_opcode = op_none;
3793  const char *hazard = 0;
3794
3795  switch (*ptr)
3796    {
3797    case 'f':
3798      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3799	this_opcode = op_fsft;
3800      break;
3801    case 'r':
3802      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3803	this_opcode = op_ret;
3804      break;
3805    case 's':
3806      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3807	this_opcode = op_stc;
3808      break;
3809    }
3810
3811  if (prev_opcode == op_stc && this_opcode == op_fsft)
3812    hazard = "nop";
3813  if (prev_opcode == op_stc && this_opcode == op_ret)
3814    hazard = "nop";
3815
3816  if (hazard)
3817    fprintf(file, "%s\t# %s-%s hazard\n\t",
3818	    hazard, opnames[prev_opcode], opnames[this_opcode]);
3819
3820  prev_opcode = this_opcode;
3821}
3822
3823/* Handle attributes.  */
3824
3825static tree
3826mep_validate_based_tiny (tree *node, tree name, tree args,
3827			 int flags ATTRIBUTE_UNUSED, bool *no_add)
3828{
3829  if (TREE_CODE (*node) != VAR_DECL
3830      && TREE_CODE (*node) != POINTER_TYPE
3831      && TREE_CODE (*node) != TYPE_DECL)
3832    {
3833      warning (0, "%qE attribute only applies to variables", name);
3834      *no_add = true;
3835    }
3836  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3837    {
3838      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3839	{
3840	  warning (0, "address region attributes not allowed with auto storage class");
3841	  *no_add = true;
3842	}
3843      /* Ignore storage attribute of pointed to variable: char __far * x;  */
3844      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3845	{
3846	  warning (0, "address region attributes on pointed-to types ignored");
3847	  *no_add = true;
3848	}
3849    }
3850
3851  return NULL_TREE;
3852}
3853
3854static int
3855mep_multiple_address_regions (tree list, bool check_section_attr)
3856{
3857  tree a;
3858  int count_sections = 0;
3859  int section_attr_count = 0;
3860
3861  for (a = list; a; a = TREE_CHAIN (a))
3862    {
3863      if (is_attribute_p ("based", TREE_PURPOSE (a))
3864	  || is_attribute_p ("tiny", TREE_PURPOSE (a))
3865	  || is_attribute_p ("near", TREE_PURPOSE (a))
3866	  || is_attribute_p ("far", TREE_PURPOSE (a))
3867	  || is_attribute_p ("io", TREE_PURPOSE (a)))
3868	count_sections ++;
3869      if (check_section_attr)
3870	section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3871    }
3872
3873  if (check_section_attr)
3874    return section_attr_count;
3875  else
3876    return count_sections;
3877}
3878
/* Attribute list for DECL: a type's attributes if DECL is a type,
   else the decl's own attributes if it has any, else the attributes
   of the decl's type.  NOTE(review): the nested ?: chain is
   unparenthesized; confirm grouping before reusing the macro inside
   a larger expression.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3884
/* Attribute handler for "near" and "far".  Like
   mep_validate_based_tiny, but these are also accepted on functions,
   and a second address-region attribute on the same declaration is
   diagnosed and discarded.  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Auto variables cannot live in a special address region.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  /* Only one address-region attribute may apply; drop the others.  */
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3921
3922static tree
3923mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3924			   int flags ATTRIBUTE_UNUSED, bool *no_add)
3925{
3926  if (TREE_CODE (*node) != FUNCTION_DECL
3927      && TREE_CODE (*node) != METHOD_TYPE)
3928    {
3929      warning (0, "%qE attribute only applies to functions", name);
3930      *no_add = true;
3931    }
3932  return NULL_TREE;
3933}
3934
3935static tree
3936mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3937			int flags ATTRIBUTE_UNUSED, bool *no_add)
3938{
3939  tree function_type;
3940
3941  if (TREE_CODE (*node) != FUNCTION_DECL)
3942    {
3943      warning (0, "%qE attribute only applies to functions", name);
3944      *no_add = true;
3945      return NULL_TREE;
3946    }
3947
3948  if (DECL_DECLARED_INLINE_P (*node))
3949    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3950  DECL_UNINLINABLE (*node) = 1;
3951
3952  function_type = TREE_TYPE (*node);
3953
3954  if (TREE_TYPE (function_type) != void_type_node)
3955    error ("interrupt function must have return type of void");
3956
3957  if (prototype_p (function_type)
3958      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3959	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3960    error ("interrupt function must have no arguments");
3961
3962  return NULL_TREE;
3963}
3964
/* Attribute handler for "io" and "cb".  These apply to variables and
   take an optional integer-constant argument (the address).  Unless
   TARGET_IO_NO_VOLATILE, the variable is also marked volatile.  */

static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper before the constant check.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
3992
/* Attribute handler for "vliw", valid only on function types and
   decls, and only when a VLIW configuration is enabled.  The static
   flags below make each usage hint print at most once per
   compilation.  */

static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      /* Name what the attribute was mistakenly placed on, with
	 friendlier wording for the common pointer/array mistakes.  */
      given_type = get_tree_code_name (TREE_CODE (*node));
      if (TREE_CODE (*node) == POINTER_TYPE)
 	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
 	given_type = "arrays";

      if (given_type)
 	warning (0, "%qE attribute only applies to functions, not %s",
 		 name, given_type);
      else
 	warning (0, "%qE attribute only applies to functions",
 		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
 	  && !gave_pointer_note)
 	{
 	  inform (input_location,
 	          "to describe a pointer to a VLIW function, use syntax like this:\n%s",
 	          "   typedef int (__vliw *vfuncptr) ();");
 	  gave_pointer_note = 1;
 	}

      if (TREE_CODE (*node) == ARRAY_TYPE
 	  && !gave_array_note)
 	{
 	  inform (input_location,
 	          "to describe an array of VLIW function pointers, use syntax like this:\n%s",
 	          "   typedef int (__vliw *vfuncptr[]) ();");
 	  gave_array_note = 1;
 	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4043
4044static const struct attribute_spec mep_attribute_table[11] =
4045{
4046  /* name         min max decl   type   func   handler
4047     affects_type_identity */
4048  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
4049  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
4050  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
4051  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
4052  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4053    false },
4054  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
4055  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
4056  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
4057  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
4058  { NULL,           0, 0, false, false, false, NULL, false }
4059};
4060
4061static bool
4062mep_function_attribute_inlinable_p (const_tree callee)
4063{
4064  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4065  if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4066  return (lookup_attribute ("disinterrupt", attrs) == 0
4067	  && lookup_attribute ("interrupt", attrs) == 0);
4068}
4069
4070static bool
4071mep_can_inline_p (tree caller, tree callee)
4072{
4073  if (TREE_CODE (callee) == ADDR_EXPR)
4074    callee = TREE_OPERAND (callee, 0);
4075
4076  if (!mep_vliw_function_p (caller)
4077      && mep_vliw_function_p (callee))
4078    {
4079      return false;
4080    }
4081  return true;
4082}
4083
4084#define FUNC_CALL		1
4085#define FUNC_DISINTERRUPT	2
4086
4087
/* Per-function record of #pragma tags.  FLAG holds the FUNC_* bits
   noted for the function; USED holds the bits that were later looked
   up, so stray pragmas can be diagnosed at end of file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
};
4092
/* hash_map traits keying on C-string contents rather than pointer
   identity.  */
struct pragma_traits : default_hashmap_traits
{
  static hashval_t hash (const char *s) { return htab_hash_string (s); }
  static bool
  equal_keys (const char *a, const char *b)
  {
    return strcmp (a, b) == 0;
  }
};
4102
/* Hash table mapping function names to the #pragma call /
   #pragma disinterrupt flags recorded for them.  */
static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
  pragma_htab;
4106
4107static void
4108mep_note_pragma_flag (const char *funcname, int flag)
4109{
4110  if (!pragma_htab)
4111    pragma_htab
4112      = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
4113
4114  bool existed;
4115  const char *name = ggc_strdup (funcname);
4116  pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4117  if (!existed)
4118    {
4119      slot->flag = 0;
4120      slot->used = 0;
4121    }
4122  slot->flag |= flag;
4123}
4124
4125static bool
4126mep_lookup_pragma_flag (const char *funcname, int flag)
4127{
4128  if (!pragma_htab)
4129    return false;
4130
4131  if (funcname[0] == '@' && funcname[2] == '.')
4132    funcname += 3;
4133
4134  pragma_entry *slot = pragma_htab->get (funcname);
4135  if (slot && (slot->flag & flag))
4136    {
4137      slot->used |= flag;
4138      return true;
4139    }
4140  return false;
4141}
4142
/* Return true if FUNCNAME was named in a "#pragma call".  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}
4148
/* Record that FUNCNAME appeared in a "#pragma call".  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}
4154
/* Return true if FUNCNAME was named in a "#pragma disinterrupt".  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4160
/* Record that FUNCNAME appeared in a "#pragma disinterrupt".  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4166
4167bool
4168note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4169				 void *)
4170{
4171  if ((e.flag & FUNC_DISINTERRUPT)
4172      && !(e.used & FUNC_DISINTERRUPT))
4173    warning (0, "\"#pragma disinterrupt %s\" not used", s);
4174  return 1;
4175}
4176
/* End-of-file hook: report any "#pragma disinterrupt" that was never
   used.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
}
4183
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4188
/* Pragma bridge: snapshot the current register information.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}
4194
/* Pragma bridge: rebuild register class data after a pragma changed
   the register set.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}
4200
/* Pragma bridge: (re)initialize register class data.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4206
4207
4208
/* Map DECL's attribute LIST to its one-character section-encoding
   letter: 'b' based, 't' tiny, 'n' near, 'f' far, 'i' io at a known
   constant address, 'I' io otherwise, 'c' cb, or 0 for none.  With
   TARGET_TF, functions lacking an explicit section default to 'f'.  */

static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  /* Only one address-region attribute may apply; drop the rest.  */
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when the io address is a constant in range.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4254
4255static int
4256mep_comp_type_attributes (const_tree t1, const_tree t2)
4257{
4258  int vliw1, vliw2;
4259
4260  vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4261  vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4262
4263  if (vliw1 != vliw2)
4264    return 0;
4265
4266  return 1;
4267}
4268
/* TARGET_INSERT_ATTRIBUTES hook.  Adds implicit MeP attributes:
   functions named in a "#pragma disinterrupt" get a "disinterrupt"
   attribute, and qualifying static-storage variables with no explicit
   address-region attribute get a default region ("based", "tiny" or
   "far") chosen from their size and the current target options.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Honor "#pragma disinterrupt <name>" by attaching the
	 corresponding attribute.  */
      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only variables with static storage duration are given a default
     address region.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an explicit region encoding, first on the decl's own
     attributes, then on its type's.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      /* State for detecting two distinct decls given the same
		 __io address; persists across calls.  */
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* TARGET_RAND_TPGP: scatter small objects across the
	 based/tiny/far sections using a simple hash of the name.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Default region by size: based, then tiny, then (with
	 TARGET_L) far.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* mep_const_section overrides the choice for read-only data
     ("near" means: leave the decl alone).  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4419
/* TARGET_ENCODE_SECTION_INFO.  On the first call for a variable or
   function DECL, derive the address-region encoding from its
   attributes (see mep_attrlist_to_encoding) and, if there is one,
   rename the symbol to "@<e>.<oldname>".  Warn when the object is
   larger than the based/tiny/near region allows.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* The symbol is either a bare SYMBOL_REF or a MEM wrapping one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Prefix the assembler name with "@<e>." so later passes can
	 recover the encoding from the name alone; keep the weak flag
	 and decl association on the new SYMBOL_REF.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Per-region size limits used by the diagnostic below
	 (based: 128, tiny: 64K, near: 16M; others unlimited).  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4495
/* TARGET_STRIP_NAME_ENCODING.  Strip any leading '*' user-label
   markers and any "@<e>." region-encoding prefixes (attached by
   mep_encode_section_info) from SYM, returning a pointer into the
   original string.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  bool stripped = true;

  while (stripped)
    {
      stripped = false;
      if (sym[0] == '*')
	{
	  sym += 1;
	  stripped = true;
	}
      else if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  stripped = true;
	}
    }
  return sym;
}
4509
/* TARGET_ASM_SELECT_SECTION.  Pick the output section for DECL from
   the "@<e>." prefix of its assembler name (attached by
   mep_encode_section_info), whether the object is effectively
   read-only, and — for functions — whether they carry the "vliw"
   attribute.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section: it must be
     declared read-only, free of side effects, and have a constant
     initializer.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions go to the vtext/vftext sections.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables are emitted as absolute symbols by
	       mep_output_aligned_common; an initializer makes no
	       sense for them.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4600
/* TARGET_ASM_UNIQUE_SECTION.  Give DECL a section of its own, named
   <prefix><symbol>, where the prefix is chosen from the decl's kind
   (text/vtext/rodata/data) combined with any "@<e>." region encoding
   on the symbol, using the ".gnu.linkonce." variants for COMDAT
   decls.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by [section kind][is-COMDAT].  */
  static const char *prefixes[][2] =
  {
    { ".text.",   ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".based.",   ".gnu.linkonce.based." },
    { ".sdata.",   ".gnu.linkonce.s." },
    { ".far.",     ".gnu.linkonce.far." },
    { ".ftext.",   ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.",   ".gnu.linkonce.v." },
    { ".vftext.",   ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly "@<e>."-encoded) RTL symbol name over the
     plain assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the choice using the region encoding, then strip the
     "@<e>." prefix from the name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len    = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4672
4673/* Given a decl, a section name, and whether the decl initializer
4674   has relocs, choose attributes for the section.  */
4675
4676#define SECTION_MEP_VLIW	SECTION_MACH_DEP
4677
4678static unsigned int
4679mep_section_type_flags (tree decl, const char *name, int reloc)
4680{
4681  unsigned int flags = default_section_type_flags (decl, name, reloc);
4682
4683  if (decl && TREE_CODE (decl) == FUNCTION_DECL
4684      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4685    flags |= SECTION_MEP_VLIW;
4686
4687  return flags;
4688}
4689
4690/* Switch to an arbitrary section NAME with attributes as specified
4691   by FLAGS.  ALIGN specifies any known alignment requirements for
4692   the section; 0 if the default should be used.
4693
4694   Differs from the standard ELF version only in support of VLIW mode.  */
4695
4696static void
4697mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4698{
4699  char flagchars[8], *f = flagchars;
4700  const char *type;
4701
4702  if (!(flags & SECTION_DEBUG))
4703    *f++ = 'a';
4704  if (flags & SECTION_WRITE)
4705    *f++ = 'w';
4706  if (flags & SECTION_CODE)
4707    *f++ = 'x';
4708  if (flags & SECTION_SMALL)
4709    *f++ = 's';
4710  if (flags & SECTION_MEP_VLIW)
4711    *f++ = 'v';
4712  *f = '\0';
4713
4714  if (flags & SECTION_BSS)
4715    type = "nobits";
4716  else
4717    type = "progbits";
4718
4719  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4720	   name, flagchars, type);
4721
4722  if (flags & SECTION_CODE)
4723    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4724	   asm_out_file);
4725}
4726
/* Output an uninitialized object NAME of SIZE bytes with ALIGN bits
   of alignment; GLOBAL selects .globl vs. .local visibility.  io/cb
   variables with a known address become absolute symbol assignments;
   "@b."/"@t."/"@f." variables become zero-filled objects in the
   based/sbss/farbss sections; everything else falls back to .comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb: emit "name = <address>" when the attribute supplied an
	 address; emit nothing at all otherwise.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN from bits to a log2 byte alignment for
	     .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: an ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4804
4805/* Trampolines.  */
4806
/* TARGET_TRAMPOLINE_INIT.  MeP does not write instructions into the
   trampoline here; it defers the work to the runtime routine
   __mep_trampoline_helper, passing the trampoline address, the nested
   function's address and the static chain value.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4819
4820/* Experimental Reorg.  */
4821
/* Return true if the rtx IN mentions REG (when REG is non-null) or
   any MEM (when REG is null).  With MODES_TOO nonzero, a register
   only matches when its machine mode equals REG's as well.  Used by
   the regmove reorg pass below.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* In the REG == NULL (memory) search, a LABEL_REF counts as a
     match.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When searching for a register, look inside the address;
	 when searching for mems, any MEM is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaf codes that can never contain a REG or MEM.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic walk over all sub-rtxes and rtx vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4887
4888#define EXPERIMENTAL_REGMOVE_REORG 1
4889
4890#if EXPERIMENTAL_REGMOVE_REORG
4891
/* Return 1 when register numbers R1 and R2 belong to the same broad
   register bank (both general registers or both control registers),
   0 otherwise.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  bool both_gr = GR_REGNO_P (r1) && GR_REGNO_P (r2);
  bool both_cr = CR_REGNO_P (r1) && CR_REGNO_P (r2);

  return (both_gr || both_cr) ? 1 : 0;
}
4901
4902static void
4903mep_reorg_regmove (rtx_insn *insns)
4904{
4905  rtx_insn *insn, *next, *follow;
4906  rtx pat, *where;
4907  int count = 0, done = 0, replace, before = 0;
4908
4909  if (dump_file)
4910    for (insn = insns; insn; insn = NEXT_INSN (insn))
4911      if (NONJUMP_INSN_P (insn))
4912	before++;
4913
4914  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4915     set that uses the r2 and r2 dies there.  We replace r2 with r1
4916     and see if it's still a valid insn.  If so, delete the first set.
4917     Copied from reorg.c.  */
4918
4919  while (!done)
4920    {
4921      done = 1;
4922      for (insn = insns; insn; insn = next)
4923	{
4924	  next = next_nonnote_nondebug_insn (insn);
4925	  if (! NONJUMP_INSN_P (insn))
4926	    continue;
4927	  pat = PATTERN (insn);
4928
4929	  replace = 0;
4930
4931	  if (GET_CODE (pat) == SET
4932	      && GET_CODE (SET_SRC (pat)) == REG
4933	      && GET_CODE (SET_DEST (pat)) == REG
4934	      && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4935	      && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4936	    {
4937	      follow = next_nonnote_nondebug_insn (insn);
4938	      if (dump_file)
4939		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4940
4941	      while (follow && NONJUMP_INSN_P (follow)
4942		     && GET_CODE (PATTERN (follow)) == SET
4943		     && !dead_or_set_p (follow, SET_SRC (pat))
4944		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4945		     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4946		{
4947		  if (dump_file)
4948		    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4949		  follow = next_nonnote_insn (follow);
4950		}
4951
4952	      if (dump_file)
4953		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4954	      if (follow && NONJUMP_INSN_P (follow)
4955		  && GET_CODE (PATTERN (follow)) == SET
4956		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4957		{
4958		  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4959		    {
4960		      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4961			{
4962			  replace = 1;
4963			  where = & SET_SRC (PATTERN (follow));
4964			}
4965		    }
4966		  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4967		    {
4968		      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4969			{
4970			  replace = 1;
4971			  where = & PATTERN (follow);
4972			}
4973		    }
4974		}
4975	    }
4976
4977	  /* If so, follow is the corresponding insn */
4978	  if (replace)
4979	    {
4980	      if (dump_file)
4981		{
4982		  rtx_insn *x;
4983
4984		  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4985		  for (x = insn; x ;x = NEXT_INSN (x))
4986		    {
4987		      print_rtl_single (dump_file, x);
4988		      if (x == follow)
4989			break;
4990		      fprintf (dump_file, "\n");
4991		    }
4992		}
4993
4994	      if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4995					       follow, where))
4996		{
4997		  count ++;
4998		  delete_insn (insn);
4999		  if (dump_file)
5000		    {
5001		      fprintf (dump_file, "\n----- Success!  new insn:\n\n");
5002		      print_rtl_single (dump_file, follow);
5003		    }
5004		  done = 0;
5005		}
5006	    }
5007	}
5008    }
5009
5010  if (dump_file)
5011    {
5012      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5013      fprintf (dump_file, "=====\n");
5014    }
5015}
5016#endif
5017
5018
5019/* Figure out where to put LABEL, which is the label for a repeat loop.
5020   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5021   the loop ends just before LAST_INSN.  If SHARED, insns other than the
5022   "repeat" might use LABEL to jump to the loop's continuation point.
5023
5024   Return the last instruction in the adjusted loop.  */
5025
static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  /* COUNT tracks how many real insns end up after LABEL (the repeat
     epilogue must contain exactly two).  */
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Walk backwards, pulling up to two suitable insns into the
       repeat epilogue (NEXT ends up at the first of them).  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  /* Pad the epilogue out to the required two insns with nops.  */
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5116
5117
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn for OPERANDS.  A loop tag ties each doloop_end to
   its doloop_begin for mep_reorg_repeat; a new tag is allocated
   whenever none exists yet or the previous doloop insn emitted was of
   the same kind as this one.  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5136
5137
5138/* Code for converting doloop_begins and doloop_ends into valid
5139   MeP instructions.  A doloop_begin is just a placeholder:
5140
5141	$count = unspec ($count)
5142
5143   where $count is initially the number of iterations - 1.
5144   doloop_end has the form:
5145
5146	if ($count-- == 0) goto label
5147
5148   The counter variable is private to the doloop insns, nothing else
5149   relies on its value.
5150
5151   There are three cases, in decreasing order of preference:
5152
5153      1. A loop has exactly one doloop_begin and one doloop_end.
5154	 The doloop_end branches to the first instruction after
5155	 the doloop_begin.
5156
5157	 In this case we can replace the doloop_begin with a repeat
5158	 instruction and remove the doloop_end.  I.e.:
5159
5160		$count1 = unspec ($count1)
5161	    label:
5162		...
5163		insn1
5164		insn2
5165		if ($count2-- == 0) goto label
5166
5167	  becomes:
5168
5169		repeat $count1,repeat_label
5170	    label:
5171		...
5172	    repeat_label:
5173		insn1
5174		insn2
5175		# end repeat
5176
5177      2. As for (1), except there are several doloop_ends.  One of them
5178	 (call it X) falls through to a label L.  All the others fall
5179	 through to branches to L.
5180
5181	 In this case, we remove X and replace the other doloop_ends
5182	 with branches to the repeat label.  For example:
5183
5184		$count1 = unspec ($count1)
5185	    start:
5186		...
5187		if ($count2-- == 0) goto label
5188	    end:
5189		...
5190		if ($count3-- == 0) goto label
5191		goto end
5192
5193	 becomes:
5194
5195		repeat $count1,repeat_label
5196	    start:
5197		...
5198	    repeat_label:
5199		nop
5200		nop
5201		# end repeat
5202	    end:
5203		...
5204		goto repeat_label
5205
5206      3. The fallback case.  Replace doloop_begins with:
5207
5208		$count = $count + 1
5209
5210	 Replace doloop_ends with the equivalent of:
5211
5212		$count = $count - 1
5213		if ($count == 0) goto label
5214
5215	 Note that this might need a scratch register if $count
5216	 is stored in memory.  */
5217
/* A structure describing one doloop_begin.  Allocated (on the stack,
   via alloca) and chained per loop tag by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5229
/* A structure describing a doloop_end.  Allocated (on the stack, via
   alloca) and chained per loop tag by mep_reorg_repeat.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5254
5255
/* One do-while loop: all of its doloop_begin and doloop_end insns,
   as grouped by loop tag in mep_reorg_repeat.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5265
5266
5267/* Return true if LOOP can be converted into repeat/repeat_end form
5268   (that is, if it matches cases (1) or (2) above).  */
5269
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  In other words, each one must fall through to an
     unconditional jump whose target is X's fallthrough insn.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5297
5298
/* The main repeat reorg function.  See comment above for details.
   Scan INSNS for doloop_begin/doloop_end pairs (grouped by their tag
   operand), convert loops that qualify into hardware repeat insns,
   and open-code the remainder as increment / decrement-and-branch
   sequences.  */

static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, indexed by loop tag.
     All records are alloca'd, so they live until this pass returns.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* Operand 2 is the loop tag; it selects the group record.  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  The head of the
	     list (loop->end) is the candidate that gets the repeat
	     label below, so prefer ends without such a jump.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    /* The unconditional jump that used to follow this end is
	       now unreachable; remove it too.  */
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.
	       Registers above 15 can't be used directly here, so fall
	       back on the scratch operand in that case.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5447
5448
/* Return true if INSN is a conditional branch whose condition can be
   inverted and still be recognized as a valid insn.  Only EQ/NE/LT/GE
   comparisons are considered.  The test works by flipping the
   condition code in place, re-running recog, and then restoring the
   original code, so INSN is left unmodified on return (though its
   cached INSN_CODE is invalidated so it will be re-recognized).  */

static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* No in-place inverse for other comparison codes.  */
      return false;
    }
  /* Force re-recognition with the inverted condition.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Restore the original condition and invalidate the cached code
     again, since the pattern has changed back.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5486
/* Invert the sense of the conditional branch INSN and retarget it at a
   fresh label emitted just after AFTER.  INSN must have one of the
   invertible conditions accepted by mep_invertable_branch_p (EQ, NE,
   LT or GE).  If the old target label becomes unused it is deleted.  */

static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label (the
     other arm is pc).  Delete the old label if we were its only user,
     then point the reference at the new label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Re-recognize the rewritten jump; it must still be valid.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5532
/* Look for backward conditional (or unconditional) branches that close
   a loop, and convert suitable ones into erepeat sequences.  For each
   invertible jump we scan backwards to its target label, counting
   insns and bailing out on anything unsafe (calls, barriers, labels
   with unknown users).  When the loop top is found and is safe, an
   erepeat is inserted after the label and the closing jump is either
   deleted (simple jump) or inverted to exit the loop.  */

static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself counts as one loop insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards towards the branch target.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop: the hardware repeat replaces
		       the jump entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop: invert the branch so that it
		       now jumps out of the loop.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5644
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
    {
      /* Find the first real insn the jump jumps to.  */
      label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
      while (ret
	     && (NOTE_P (ret)
		 || LABEL_P (ret)
		 || GET_CODE (PATTERN (ret)) == USE))
	ret = NEXT_INSN (ret);

      if (ret)
	{
	  /* Is it a return?  */
	  ret_code = recog_memoized (ret);
	  if (ret_code == CODE_FOR_return_internal
	      || ret_code == CODE_FOR_eh_return_internal)
	    {
	      /* It is.  Replace the jump with a return.  The jump's
		 target label loses one use; delete it if that was the
		 last one.  */
	      LABEL_NUSES (label) --;
	      if (LABEL_NUSES (label) == 0)
		delete_insn (label);
	      PATTERN (insn) = copy_rtx (PATTERN (ret));
	      INSN_CODE (insn) = -1;
	    }
	}
    }
}
5682
5683
/* Combine pairs of adjacent "reg = reg + const" insns that target the
   same register into a single add, provided the combined constant
   still fits the signed 16-bit immediate range (the bounds here are
   conservative: the exact values 32767 and -32768 are excluded).  The
   second insn is removed by unlinking it from the insn chain directly
   rather than via delete_insn.  */

static void
mep_reorg_addcombine (rtx_insn *insns)
{
  rtx_insn *i, *n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I, then splice N out of the
		   chain.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		SET_NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  SET_PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5719
5720/* If this insn adjusts the stack, return the adjustment, else return
5721   zero.  */
5722static int
5723add_sp_insn_p (rtx_insn *insn)
5724{
5725  rtx pat;
5726
5727  if (! single_set (insn))
5728    return 0;
5729  pat = PATTERN (insn);
5730  if (GET_CODE (SET_DEST (pat)) != REG)
5731    return 0;
5732  if (REGNO (SET_DEST (pat)) != SP_REGNO)
5733    return 0;
5734  if (GET_CODE (SET_SRC (pat)) != PLUS)
5735    return 0;
5736  if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5737    return 0;
5738  if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5739    return 0;
5740  if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5741    return 0;
5742  return INTVAL (XEXP (SET_SRC (pat), 1));
5743}
5744
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of a single $sp -= N / $sp += N
   pair with no other references to $sp and no calls in between,
   delete both stack adjustments.  */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      /* Stop at the last real insn; it is typically the return and
	 may legitimately mention $sp.  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Only a single, exactly compensating adjustment may
	     appear.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5799
/* Implement TARGET_MACHINE_DEPENDENT_REORG: run the MeP-specific
   late-RTL transformations in a fixed order.  */
static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat conversion is only valid when profiling is off and the
     RPB register is preserved across interrupts.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5831
5832
5833
5834/*----------------------------------------------------------------------*/
5835/* Builtins								*/
5836/*----------------------------------------------------------------------*/
5837
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name onto its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   real entries come from the generated COPROC_SELECTION_TABLE.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5865
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.
     Default to the first table entry when no -mconfig= is given or
     when the name isn't found.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries
     overwrite earlier ones, chaining the displaced entry so that
     mep_intrinsic_chain[i] < i always holds.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making the cmov1/cmov2 check dead code -- confirm
     whether forcing core<->copro moves on is still intended.  */
  mep_have_core_copro_moves_p = 1;
}
5907
/* Declare all available intrinsic functions.  Called once only.  */

/* Cached tree type nodes for the coprocessor builtin types.  They are
   created in mep_init_builtins and consulted by
   mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5918
5919static tree
5920mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5921{
5922  switch (cr)
5923    {
5924    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
5925    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
5926    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
5927    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
5928    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
5929    case cgen_regnum_operand_type_CHAR:		return char_type_node;
5930    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
5931    case cgen_regnum_operand_type_SI:		return intSI_type_node;
5932    case cgen_regnum_operand_type_DI:		return intDI_type_node;
5933    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
5934    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
5935    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
5936    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
5937    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
5938    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
5939    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
5940    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5941    default:
5942      return void_type_node;
5943    }
5944}
5945
/* Implement TARGET_INIT_BUILTINS: register the coprocessor builtin
   types and declare one builtin function per available intrinsic.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The coprocessor data-bus type matches the CR register width.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Declare each intrinsic only once, even when several insns
	   implement it.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6012
6013/* Report the unavailablity of the given intrinsic.  */
6014
6015#if 1
6016static void
6017mep_intrinsic_unavailable (int intrinsic)
6018{
6019  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6020
6021  if (already_reported_p[intrinsic])
6022    return;
6023
6024  if (mep_intrinsic_insn[intrinsic] < 0)
6025    error ("coprocessor intrinsic %qs is not available in this configuration",
6026	   cgen_intrinsics[intrinsic]);
6027  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6028    error ("%qs is not available in VLIW functions",
6029	   cgen_intrinsics[intrinsic]);
6030  else
6031    error ("%qs is not available in non-VLIW functions",
6032	   cgen_intrinsics[intrinsic]);
6033
6034  already_reported_p[intrinsic] = 1;
6035}
6036#endif
6037
6038
6039/* See if any implementation of INTRINSIC is available to the
6040   current function.  If so, store the most general implementation
6041   in *INSN_PTR and return true.  Return false otherwise.  */
6042
6043static bool
6044mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6045{
6046  int i;
6047
6048  i = mep_intrinsic_insn[intrinsic];
6049  while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6050    i = mep_intrinsic_chain[i];
6051
6052  if (i >= 0)
6053    {
6054      *insn_ptr = &cgen_insns[i];
6055      return true;
6056    }
6057  return false;
6058}
6059
6060
6061/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6062   If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6063   try using a work-alike instead.  In this case, the returned insn
6064   may have three operands rather than two.  */
6065
6066static bool
6067mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6068{
6069  size_t i;
6070
6071  if (intrinsic == mep_cmov)
6072    {
6073      for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6074	if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6075	  return true;
6076      return false;
6077    }
6078  return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6079}
6080
6081
6082/* If ARG is a register operand that is the same size as MODE, convert it
6083   to MODE using a subreg.  Otherwise return ARG as-is.  */
6084
6085static rtx
6086mep_convert_arg (machine_mode mode, rtx arg)
6087{
6088  if (GET_MODE (arg) != mode
6089      && register_operand (arg, VOIDmode)
6090      && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6091    return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6092  return arg;
6093}
6094
6095
6096/* Apply regnum conversions to ARG using the description given by REGNUM.
6097   Return the new argument on success and null on failure.  */
6098
6099static rtx
6100mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6101{
6102  if (regnum->count == 0)
6103    return arg;
6104
6105  if (GET_CODE (arg) != CONST_INT
6106      || INTVAL (arg) < 0
6107      || INTVAL (arg) >= regnum->count)
6108    return 0;
6109
6110  return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6111}
6112
6113
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.  Return
   the legitimized rtx, or null if ARG cannot be made to match.  The
   fallbacks below are tried in order; their sequencing matters.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* No legitimization strategy worked.  */
  return 0;
}
6171
6172
6173/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6174   function FNNAME.  OPERAND describes the operand to which ARGNUM
6175   is mapped.  */
6176
6177static void
6178mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6179		      int argnum, tree fnname)
6180{
6181  size_t i;
6182
6183  if (GET_CODE (arg) == CONST_INT)
6184    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6185      if (operand->predicate == cgen_immediate_predicates[i].predicate)
6186	{
6187	  const struct cgen_immediate_predicate *predicate;
6188	  HOST_WIDE_INT argval;
6189
6190	  predicate = &cgen_immediate_predicates[i];
6191	  argval = INTVAL (arg);
6192	  if (argval < predicate->lower || argval >= predicate->upper)
6193	    error ("argument %d of %qE must be in the range %d...%d",
6194		   argnum, fnname, predicate->lower, predicate->upper - 1);
6195	  else
6196	    error ("argument %d of %qE must be a multiple of %d",
6197		   argnum, fnname, predicate->align);
6198	  return;
6199	}
6200
6201  error ("incompatible type for argument %d of %qE", argnum, fnname);
6202}
6203
/* Implement TARGET_EXPAND_BUILTIN: expand a call to a MeP intrinsic.
   EXP is the CALL_EXPR; TARGET is a suggested location for the
   result.  Returns the result rtx (possibly TARGET), or NULL_RTX
   after issuing a diagnostic.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* FIRST_ARG is 1 when slot 0 of arg[]/op[] holds the return value
     rather than a user-supplied argument.  */
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* Adjust the expected argument count when the insn produces a
     result: the result occupies operand 0 and is not passed by the
     user (unless cret_p > 1).  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* Translate register-number arguments into hard registers.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Allocate the return-value slot(s), reusing TARGET when its mode
     matches operand 0.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  Unused trailing operands are simply
     ignored by the generator function.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6383
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  The MeP backend provides
   no generic vector support, so every vector mode is rejected.  */
static bool
mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6389
6390/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6391   a global register.  */
6392
6393static bool
6394global_reg_mentioned_p_1 (const_rtx x)
6395{
6396  int regno;
6397
6398  switch (GET_CODE (x))
6399    {
6400    case SUBREG:
6401      if (REG_P (SUBREG_REG (x)))
6402	{
6403	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6404	      && global_regs[subreg_regno (x)])
6405	    return true;
6406	  return false;
6407	}
6408      break;
6409
6410    case REG:
6411      regno = REGNO (x);
6412      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6413	return true;
6414      return false;
6415
6416    case CALL:
6417      /* A non-constant call might use a global register.  */
6418      return true;
6419
6420    default:
6421      break;
6422    }
6423
6424  return false;
6425}
6426
6427/* Returns nonzero if X mentions a global register.  */
6428
6429static bool
6430global_reg_mentioned_p (rtx x)
6431{
6432  if (INSN_P (x))
6433    {
6434      if (CALL_P (x))
6435	{
6436	  if (! RTL_CONST_OR_PURE_CALL_P (x))
6437	    return true;
6438	  x = CALL_INSN_FUNCTION_USAGE (x);
6439	  if (x == 0)
6440	    return false;
6441	}
6442      else
6443	x = PATTERN (x);
6444    }
6445
6446  subrtx_iterator::array_type array;
6447  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6448    if (global_reg_mentioned_p_1 (*iter))
6449      return true;
6450  return false;
6451}
6452/* Scheduling hooks for VLIW mode.
6453
6454   Conceptually this is very simple: we have a two-pack architecture
6455   that takes one core insn and one coprocessor insn to make up either
6456   a 32- or 64-bit instruction word (depending on the option bit set in
6457   the chip).  I.e. in VL32 mode, we can pack one 16-bit core insn and
6458   one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6459   and one 48-bit cop insn or two 32-bit core/cop insns.
6460
6461   In practice, instruction selection will be a bear.  Consider in
6462   VL64 mode the following insns
6463
6464	add $1, 1
6465	cmov $cr0, $0
6466
6467   these cannot pack, since the add is a 16-bit core insn and cmov
6468   is a 32-bit cop insn.  However,
6469
6470	add3 $1, $1, 1
6471	cmov $cr0, $0
6472
6473   packs just fine.  For good VLIW code generation in VL64 mode, we
6474   will have to have 32-bit alternatives for many of the common core
6475   insns.  Not implemented.  */
6476
/* Implement TARGET_SCHED_ADJUST_COST.  INSN depends on DEP_INSN
   through the dependence described by LINK; COST is the default
   latency.  Anti and output dependencies are treated as free, except
   that output dependencies between intrinsics touching global hard
   registers keep a cost of 1 so the earlier one stays prioritized.  */

static int
mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  int cost_specified;

  /* REG_NOTE_KIND is 0 (REG_DEP_TRUE) for a true dependence; any
     nonzero kind is an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.   We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6523
6524/* ??? We don't properly compute the length of a load/store insn,
6525   taking into account the addressing mode.  */
6526
6527static int
6528mep_issue_rate (void)
6529{
6530  return TARGET_IVC2 ? 3 : 2;
6531}
6532
6533/* Return true if function DECL was declared with the vliw attribute.  */
6534
6535bool
6536mep_vliw_function_p (tree decl)
6537{
6538  return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6539}
6540
6541static rtx_insn *
6542mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6543		     int length)
6544{
6545  int i;
6546
6547  for (i = nready - 1; i >= 0; --i)
6548    {
6549      rtx_insn *insn = ready[i];
6550      if (recog_memoized (insn) >= 0
6551	  && get_attr_slot (insn) == slot
6552	  && get_attr_length (insn) == length)
6553	return insn;
6554    }
6555
6556  return NULL;
6557}
6558
6559static void
6560mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6561{
6562  int i;
6563
6564  for (i = 0; i < nready; ++i)
6565    if (ready[i] == insn)
6566      {
6567	for (; i < nready - 1; ++i)
6568	  ready[i] = ready[i + 1];
6569	ready[i] = insn;
6570	return;
6571      }
6572
6573  gcc_unreachable ();
6574}
6575
6576static void
6577mep_print_sched_insn (FILE *dump, rtx_insn *insn)
6578{
6579  const char *slots = "none";
6580  const char *name = NULL;
6581  int code;
6582  char buf[30];
6583
6584  if (GET_CODE (PATTERN (insn)) == SET
6585      || GET_CODE (PATTERN (insn)) == PARALLEL)
6586    {
6587      switch (get_attr_slots (insn))
6588	{
6589	case SLOTS_CORE: slots = "core"; break;
6590	case SLOTS_C3: slots = "c3"; break;
6591	case SLOTS_P0: slots = "p0"; break;
6592	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6593	case SLOTS_P0_P1: slots = "p0,p1"; break;
6594	case SLOTS_P0S: slots = "p0s"; break;
6595	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6596	case SLOTS_P1: slots = "p1"; break;
6597	default:
6598	  sprintf(buf, "%d", get_attr_slots (insn));
6599	  slots = buf;
6600	  break;
6601	}
6602    }
6603  if (GET_CODE (PATTERN (insn)) == USE)
6604    slots = "use";
6605
6606  code = INSN_CODE (insn);
6607  if (code >= 0)
6608    name = get_insn_name (code);
6609  if (!name)
6610    name = "{unknown}";
6611
6612  fprintf (dump,
6613	   "insn %4d %4d  %8s  %s\n",
6614	   code,
6615	   INSN_UID (insn),
6616	   name,
6617	   slots);
6618}
6619
/* Implement TARGET_SCHED_REORDER.  READY/*PNREADY is the scheduler's
   ready list for cycle CLOCK.  Return how many insns may be issued:
   2 when a matched core/cop pair has been moved to the head of the
   list, the whole list for IVC2 (which schedules via the DFA), and 1
   otherwise.  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx_insn *core_insn, *cop_insn;
  int i;

  /* Note: DUMP, SCHED_VERBOSE and CLOCK are marked ATTRIBUTE_UNUSED
     but are in fact used below; the attribute merely silences
     warnings in configurations where the uses compile away.  */
  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Pairing only makes sense inside a VLIW function.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  /* Try a 16-bit core insn with a matching cop insn (16-bit in VL32
     mode, 48-bit in VL64 mode)...  */
  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  /* ...or, in VL64 mode, a 32-bit core insn with a 32-bit cop insn.  */
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6668
6669/* Return true if X contains a register that is set by insn PREV.  */
6670
6671static bool
6672mep_store_find_set (const_rtx x, const rtx_insn *prev)
6673{
6674  subrtx_iterator::array_type array;
6675  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6676    if (REG_P (x) && reg_set_p (x, prev))
6677      return true;
6678  return false;
6679}
6680
6681/* Like mep_store_bypass_p, but takes a pattern as the second argument,
6682   not the containing insn.  */
6683
6684static bool
6685mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
6686{
6687  /* Cope with intrinsics like swcpa.  */
6688  if (GET_CODE (pat) == PARALLEL)
6689    {
6690      int i;
6691
6692      for (i = 0; i < XVECLEN (pat, 0); i++)
6693	if (mep_store_data_bypass_p (prev,
6694				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
6695	  return true;
6696
6697      return false;
6698    }
6699
6700  /* Check for some sort of store.  */
6701  if (GET_CODE (pat) != SET
6702      || GET_CODE (SET_DEST (pat)) != MEM)
6703    return false;
6704
6705  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6706     The first operand to the unspec is the store data and the other operands
6707     are used to calculate the address.  */
6708  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6709    {
6710      rtx src;
6711      int i;
6712
6713      src = SET_SRC (pat);
6714      for (i = 1; i < XVECLEN (src, 0); i++)
6715	if (mep_store_find_set (XVECEXP (src, 0, i), prev))
6716	  return false;
6717
6718      return true;
6719    }
6720
6721  /* Otherwise just check that PREV doesn't modify any register mentioned
6722     in the memory destination.  */
6723  return !mep_store_find_set (SET_DEST (pat), prev);
6724}
6725
6726/* Return true if INSN is a store instruction and if the store address
6727   has no true dependence on PREV.  */
6728
6729bool
6730mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6731{
6732  return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6733}
6734
6735/* Return true if, apart from HI/LO, there are no true dependencies
6736   between multiplication instructions PREV and INSN.  */
6737
6738bool
6739mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6740{
6741  rtx pat;
6742
6743  pat = PATTERN (insn);
6744  if (GET_CODE (pat) == PARALLEL)
6745    pat = XVECEXP (pat, 0, 0);
6746  if (GET_CODE (pat) != SET)
6747    return false;
6748  subrtx_iterator::array_type array;
6749  FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6750    {
6751      const_rtx x = *iter;
6752      if (REG_P (x)
6753	  && REGNO (x) != LO_REGNO
6754	  && REGNO (x) != HI_REGNO
6755	  && reg_set_p (x, prev))
6756	return false;
6757    }
6758  return true;
6759}
6760
6761/* Return true if INSN is an ldc instruction that issues to the
6762   MeP-h1 integer pipeline.  This is true for instructions that
6763   read from PSW, LP, SAR, HI and LO.  */
6764
6765bool
6766mep_ipipe_ldc_p (rtx_insn *insn)
6767{
6768  rtx pat, src;
6769
6770  pat = PATTERN (insn);
6771
6772  /* Cope with instrinsics that set both a hard register and its shadow.
6773     The set of the hard register comes first.  */
6774  if (GET_CODE (pat) == PARALLEL)
6775    pat = XVECEXP (pat, 0, 0);
6776
6777  if (GET_CODE (pat) == SET)
6778    {
6779      src = SET_SRC (pat);
6780
6781      /* Cope with intrinsics.  The first operand to the unspec is
6782	 the source register.  */
6783      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6784	src = XVECEXP (src, 0, 0);
6785
6786      if (REG_P (src))
6787	switch (REGNO (src))
6788	  {
6789	  case PSW_REGNO:
6790	  case LP_REGNO:
6791	  case SAR_REGNO:
6792	  case HI_REGNO:
6793	  case LO_REGNO:
6794	    return true;
6795	  }
6796    }
6797  return false;
6798}
6799
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The two
     bundled insns are chained to each other and to the SEQUENCE's
     neighbours by hand.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  Prefer CORE_INSN's location, falling
     back to COP's when CORE_INSN has none.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6847
6848/* A helper routine for ms1_insn_dependent_p called through note_stores.  */
6849
6850static void
6851mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6852{
6853  rtx * pinsn = (rtx *) data;
6854
6855  if (*pinsn && reg_mentioned_p (x, *pinsn))
6856    *pinsn = NULL_RTX;
6857}
6858
6859/* Return true if anything in insn X is (anti,output,true) dependent on
6860   anything in insn Y.  */
6861
6862static int
6863mep_insn_dependent_p (rtx x, rtx y)
6864{
6865  rtx tmp;
6866
6867  gcc_assert (INSN_P (x));
6868  gcc_assert (INSN_P (y));
6869
6870  tmp = PATTERN (y);
6871  note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6872  if (tmp == NULL_RTX)
6873    return 1;
6874
6875  tmp = PATTERN (x);
6876  note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6877  if (tmp == NULL_RTX)
6878    return 1;
6879
6880  return 0;
6881}
6882
6883static int
6884core_insn_p (rtx_insn *insn)
6885{
6886  if (GET_CODE (PATTERN (insn)) == USE)
6887    return 0;
6888  if (get_attr_slot (insn) == SLOT_CORE)
6889    return 1;
6890  return 0;
6891}
6892
6893/* Mark coprocessor instructions that can be bundled together with
6894   the immediately preceding core instruction.  This is later used
6895   to emit the "+" that tells the assembler to create a VLIW insn.
6896
6897   For unbundled insns, the assembler will automatically add coprocessor
6898   nops, and 16-bit core nops.  Due to an apparent oversight in the
6899   spec, the assembler will _not_ automatically add 32-bit core nops,
6900   so we have to emit those here.
6901
6902   Called from mep_insn_reorg.  */
6903
static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, relocating each NOTE
	     found to just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the rest of the bundle (VOIDmode non-jump insns)
		 looking for a core insn; LAST ends up on the bundle's
		 final insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  /* The relocated core insn becomes the bundle head.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      /* Non-IVC2: try to bundle each COP insn with the preceding core
	 insn; otherwise pad it with a NOP of the complementary size.  */
      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* No suitable partner: pair the COP insn with a NOP so
		 the total bundle width comes out right.  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7086
7087
7088/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7089   Return true on success.  This function can fail if the intrinsic
7090   is unavailable or if the operands don't satisfy their predicates.  */
7091
7092bool
7093mep_emit_intrinsic (int intrinsic, const rtx *operands)
7094{
7095  const struct cgen_insn *cgen_insn;
7096  const struct insn_data_d *idata;
7097  rtx newop[10];
7098  int i;
7099
7100  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7101    return false;
7102
7103  idata = &insn_data[cgen_insn->icode];
7104  for (i = 0; i < idata->n_operands; i++)
7105    {
7106      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7107      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7108	return false;
7109    }
7110
7111  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7112			    newop[3], newop[4], newop[5],
7113			    newop[6], newop[7], newop[8]));
7114
7115  return true;
7116}
7117
7118
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently an unimplemented stub: it always reports failure.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7130
7131
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently an unimplemented stub: it always reports failure.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7148
7149static bool
7150mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7151	      int opno ATTRIBUTE_UNUSED, int *total,
7152	      bool ATTRIBUTE_UNUSED speed_t)
7153{
7154  switch (code)
7155    {
7156    case CONST_INT:
7157      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7158	*total = 0;
7159      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7160	*total = 1;
7161      else
7162	*total = 3;
7163      return true;
7164
7165    case SYMBOL_REF:
7166      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7167      return true;
7168
7169    case MULT:
7170      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7171		? COSTS_N_INSNS (3)
7172		: COSTS_N_INSNS (2));
7173      return true;
7174    }
7175  return false;
7176}
7177
/* Implement TARGET_ADDRESS_COST.  Every addressing mode is treated as
   having the same unit cost.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7186
/* Implement TARGET_ASM_INIT_SECTIONS: register the MeP-specific
   output sections (based/small/far data and the core/VLIW text
   sections).  */

static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* The VLIW text sections carry SECTION_MEP_VLIW and switch the
     assembler into .vliw mode; .ftext switches it back to .core.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7231
7232/* Initialize the GCC target structure.  */
7233
/* Assembly output, attributes and section handling.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section
/* Builtins and intrinsics.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin
/* Instruction scheduling (see the VLIW scheduling hooks above).  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder
/* Symbol and section selection.  */
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info
/* Calls, costs and machine-dependent reorg.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
/* Argument passing and varargs.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
/* Miscellaneous.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P		can_use_doloop_if_innermost

/* The target structure itself, built from the hook overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
7316
7317#include "gt-mep.h"
7318