1/* Subroutines used for code generation on Renesas RX processors.
2   Copyright (C) 2008-2022 Free Software Foundation, Inc.
3   Contributed by Red Hat.
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify
8   it under the terms of the GNU General Public License as published by
9   the Free Software Foundation; either version 3, or (at your option)
10   any later version.
11
12   GCC is distributed in the hope that it will be useful,
13   but WITHOUT ANY WARRANTY; without even the implied warranty of
14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15   GNU General Public License for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING3.  If not see
19   <http://www.gnu.org/licenses/>.  */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload.  */
24
25#define IN_TARGET_CODE 1
26
27#include "config.h"
28#include "system.h"
29#include "coretypes.h"
30#include "backend.h"
31#include "target.h"
32#include "rtl.h"
33#include "tree.h"
34#include "stringpool.h"
35#include "attribs.h"
36#include "cfghooks.h"
37#include "df.h"
38#include "memmodel.h"
39#include "tm_p.h"
40#include "regs.h"
41#include "emit-rtl.h"
42#include "diagnostic-core.h"
43#include "varasm.h"
44#include "stor-layout.h"
45#include "calls.h"
46#include "output.h"
47#include "flags.h"
48#include "explow.h"
49#include "expr.h"
50#include "toplev.h"
51#include "langhooks.h"
52#include "opts.h"
53#include "builtins.h"
54
55/* This file should be included last.  */
56#include "target-def.h"
57
58static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
59static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
60static unsigned int rx_num_interrupt_regs;
61
62static unsigned int
63rx_gp_base_regnum (void)
64{
65  if (rx_gp_base_regnum_val == INVALID_REGNUM)
66    gcc_unreachable ();
67  return rx_gp_base_regnum_val;
68}
69
70static unsigned int
71rx_pid_base_regnum (void)
72{
73  if (rx_pid_base_regnum_val == INVALID_REGNUM)
74    gcc_unreachable ();
75  return rx_pid_base_regnum_val;
76}
77
78/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */
79
80static tree
81rx_decl_for_addr (rtx op)
82{
83  if (GET_CODE (op) == MEM)
84    op = XEXP (op, 0);
85  if (GET_CODE (op) == CONST)
86    op = XEXP (op, 0);
87  while (GET_CODE (op) == PLUS)
88    op = XEXP (op, 0);
89  if (GET_CODE (op) == SYMBOL_REF)
90    return SYMBOL_REF_DECL (op);
91  return NULL_TREE;
92}
93
94static void rx_print_operand (FILE *, rtx, int);
95
96#define CC_FLAG_S	(1 << 0)
97#define CC_FLAG_Z	(1 << 1)
98#define CC_FLAG_O	(1 << 2)
99#define CC_FLAG_C	(1 << 3)
100#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */
101
102static unsigned int flags_from_mode (machine_mode mode);
103static unsigned int flags_from_code (enum rtx_code code);
104
105/* Return true if OP is a reference to an object in a PID data area.  */
106
107enum pid_type
108{
109  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
110  PID_ENCODED,		/* The object is in the PID data area.  */
111  PID_UNENCODED		/* The object will be placed in the PID data area, but it has not been placed there yet.  */
112};
113
114static enum pid_type
115rx_pid_data_operand (rtx op)
116{
117  tree op_decl;
118
119  if (!TARGET_PID)
120    return PID_NOT_PID;
121
122  if (GET_CODE (op) == PLUS
123      && GET_CODE (XEXP (op, 0)) == REG
124      && GET_CODE (XEXP (op, 1)) == CONST
125      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
126    return PID_ENCODED;
127
128  op_decl = rx_decl_for_addr (op);
129
130  if (op_decl)
131    {
132      if (TREE_READONLY (op_decl))
133	return PID_UNENCODED;
134    }
135  else
136    {
137      /* Sigh, some special cases.  */
138      if (GET_CODE (op) == SYMBOL_REF
139	  || GET_CODE (op) == LABEL_REF)
140	return PID_UNENCODED;
141    }
142
143  return PID_NOT_PID;
144}
145
146static rtx
147rx_legitimize_address (rtx x,
148		       rtx oldx ATTRIBUTE_UNUSED,
149		       machine_mode mode ATTRIBUTE_UNUSED)
150{
151  if (rx_pid_data_operand (x) == PID_UNENCODED)
152    {
153      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
154      return rv;
155    }
156
157  if (GET_CODE (x) == PLUS
158      && GET_CODE (XEXP (x, 0)) == PLUS
159      && REG_P (XEXP (XEXP (x, 0), 0))
160      && REG_P (XEXP (x, 1)))
161    return force_reg (SImode, x);
162
163  return x;
164}
165
166/* Return true if OP is a reference to an object in a small data area.  */
167
168static bool
169rx_small_data_operand (rtx op)
170{
171  if (rx_small_data_limit == 0)
172    return false;
173
174  if (GET_CODE (op) == SYMBOL_REF)
175    return SYMBOL_REF_SMALL_P (op);
176
177  return false;
178}
179
180static bool
181rx_is_legitimate_address (machine_mode mode, rtx x,
182			  bool strict ATTRIBUTE_UNUSED)
183{
184  if (RTX_OK_FOR_BASE (x, strict))
185    /* Register Indirect.  */
186    return true;
187
188  if ((GET_MODE_SIZE (mode) == 4
189       || GET_MODE_SIZE (mode) == 2
190       || GET_MODE_SIZE (mode) == 1)
191      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
192    /* Pre-decrement Register Indirect or
193       Post-increment Register Indirect.  */
194    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
195
196  switch (rx_pid_data_operand (x))
197    {
198    case PID_UNENCODED:
199      return false;
200    case PID_ENCODED:
201      return true;
202    default:
203      break;
204    }
205
206  if (GET_CODE (x) == PLUS)
207    {
208      rtx arg1 = XEXP (x, 0);
209      rtx arg2 = XEXP (x, 1);
210      rtx index = NULL_RTX;
211
212      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
213	index = arg2;
214      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
215	index = arg1;
216      else
217	return false;
218
219      switch (GET_CODE (index))
220	{
221	case CONST_INT:
222	  {
223	    /* Register Relative: REG + INT.
224	       Only positive, mode-aligned, mode-sized
225	       displacements are allowed.  */
226	    HOST_WIDE_INT val = INTVAL (index);
227	    int factor;
228
229	    if (val < 0)
230	      return false;
231
232	    switch (GET_MODE_SIZE (mode))
233	      {
234	      default:
235	      case 4: factor = 4; break;
236	      case 2: factor = 2; break;
237	      case 1: factor = 1; break;
238	      }
239
240	    if (val > (65535 * factor))
241	      return false;
242	    return (val % factor) == 0;
243	  }
244
245	case REG:
246	  /* Unscaled Indexed Register Indirect: REG + REG
247	     Size has to be "QI", REG has to be valid.  */
248	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
249
250	case MULT:
251	  {
252	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
253	       Factor has to equal the mode size, REG has to be valid.  */
254	    rtx factor;
255
256	    factor = XEXP (index, 1);
257	    index = XEXP (index, 0);
258
259	    return REG_P (index)
260	      && RTX_OK_FOR_BASE (index, strict)
261	      && CONST_INT_P (factor)
262	      && GET_MODE_SIZE (mode) == INTVAL (factor);
263	  }
264
265	default:
266	  return false;
267	}
268    }
269
270  /* Small data area accesses turn into register relative offsets.  */
271  return rx_small_data_operand (x);
272}
273
274/* Returns TRUE for simple memory addresses, ie ones
275   that do not involve register indirect addressing
276   or pre/post increment/decrement.  */
277
278bool
279rx_is_restricted_memory_address (rtx mem, machine_mode mode)
280{
281  if (! rx_is_legitimate_address
282      (mode, mem, reload_in_progress || reload_completed))
283    return false;
284
285  switch (GET_CODE (mem))
286    {
287    case REG:
288      /* Simple memory addresses are OK.  */
289      return true;
290
291    case SUBREG:
292      return RX_REG_P (SUBREG_REG (mem));
293
294    case PRE_DEC:
295    case POST_INC:
296      return false;
297
298    case PLUS:
299      {
300	rtx base, index;
301
302	/* Only allow REG+INT addressing.  */
303	base = XEXP (mem, 0);
304	index = XEXP (mem, 1);
305
306	if (! RX_REG_P (base) || ! CONST_INT_P (index))
307	  return false;
308
309	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
310      }
311
312    case SYMBOL_REF:
313      /* Can happen when small data is being supported.
314         Assume that it will be resolved into GP+INT.  */
315      return true;
316
317    default:
318      gcc_unreachable ();
319    }
320}
321
322/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */
323
324static bool
325rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
326{
327  if (GET_CODE (addr) == CONST)
328    addr = XEXP (addr, 0);
329
330  switch (GET_CODE (addr))
331    {
332      /* --REG and REG++ only work in SImode.  */
333    case PRE_DEC:
334    case POST_INC:
335      return true;
336
337    case MINUS:
338    case PLUS:
339      if (! REG_P (XEXP (addr, 0)))
340	return true;
341
342      addr = XEXP (addr, 1);
343
344      switch (GET_CODE (addr))
345	{
346	case REG:
347	  /* REG+REG only works in SImode.  */
348	  return true;
349
350	case CONST_INT:
351	  /* REG+INT is only mode independent if INT is a
352	     multiple of 4, positive and will fit into 16-bits.  */
353	  if (((INTVAL (addr) & 3) == 0)
354	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
355	    return false;
356	  return true;
357
358	case SYMBOL_REF:
359	case LABEL_REF:
360	  return true;
361
362	case MULT:
363	  /* REG+REG*SCALE is always mode dependent.  */
364	  return true;
365
366	default:
367	  /* Not recognized, so treat as mode dependent.  */
368	  return true;
369	}
370
371    case CONST_INT:
372    case SYMBOL_REF:
373    case LABEL_REF:
374    case REG:
375      /* These are all mode independent.  */
376      return false;
377
378    default:
379      /* Everything else is unrecognized,
380	 so treat as mode dependent.  */
381      return true;
382    }
383}
384
385/* A C compound statement to output to stdio stream FILE the
386   assembler syntax for an instruction operand that is a memory
387   reference whose address is ADDR.  */
388
389static void
390rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
391{
392  switch (GET_CODE (addr))
393    {
394    case REG:
395      fprintf (file, "[");
396      rx_print_operand (file, addr, 0);
397      fprintf (file, "]");
398      break;
399
400    case PRE_DEC:
401      fprintf (file, "[-");
402      rx_print_operand (file, XEXP (addr, 0), 0);
403      fprintf (file, "]");
404      break;
405
406    case POST_INC:
407      fprintf (file, "[");
408      rx_print_operand (file, XEXP (addr, 0), 0);
409      fprintf (file, "+]");
410      break;
411
412    case PLUS:
413      {
414	rtx arg1 = XEXP (addr, 0);
415	rtx arg2 = XEXP (addr, 1);
416	rtx base, index;
417
418	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
419	  base = arg1, index = arg2;
420	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
421	  base = arg2, index = arg1;
422	else
423	  {
424	    rx_print_operand (file, arg1, 0);
425	    fprintf (file, " + ");
426	    rx_print_operand (file, arg2, 0);
427	    break;
428	  }
429
430	if (REG_P (index) || GET_CODE (index) == MULT)
431	  {
432	    fprintf (file, "[");
433	    rx_print_operand (file, index, 'A');
434	    fprintf (file, ",");
435	  }
436	else /* GET_CODE (index) == CONST_INT  */
437	  {
438	    rx_print_operand (file, index, 'A');
439	    fprintf (file, "[");
440	  }
441	rx_print_operand (file, base, 0);
442	fprintf (file, "]");
443	break;
444      }
445
446    case CONST:
447      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
448	{
449	  addr = XEXP (addr, 0);
450	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
451
452          addr = XVECEXP (addr, 0, 0);
453	  gcc_assert (CONST_INT_P (addr));
454	  fprintf (file, "#");
455	  output_addr_const (file, addr);
456	  break;
457	}
458      fprintf (file, "#");
459      output_addr_const (file, XEXP (addr, 0));
460      break;
461
462    case UNSPEC:
463      addr = XVECEXP (addr, 0, 0);
464      /* Fall through.  */
465    case LABEL_REF:
466    case SYMBOL_REF:
467      fprintf (file, "#");
468      /* Fall through.  */
469    default:
470      output_addr_const (file, addr);
471      break;
472    }
473}
474
475static void
476rx_print_integer (FILE * file, HOST_WIDE_INT val)
477{
478  if (val < 64)
479    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
480  else
481    fprintf (file,
482	     TARGET_AS100_SYNTAX
483	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
484	     val);
485}
486
487static bool
488rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
489{
490  const char *  op = integer_asm_op (size, is_aligned);
491
492  if (! CONST_INT_P (x))
493    return default_assemble_integer (x, size, is_aligned);
494
495  if (op == NULL)
496    return false;
497  fputs (op, asm_out_file);
498
499  rx_print_integer (asm_out_file, INTVAL (x));
500  fputc ('\n', asm_out_file);
501  return true;
502}
503
504
505/* Handles the insertion of a single operand into the assembler output.
506   The %<letter> directives supported are:
507
508     %A  Print an operand without a leading # character.
509     %B  Print an integer comparison name.
510     %C  Print a control register name.
511     %F  Print a condition code flag name.
512     %G  Register used for small-data-area addressing
513     %H  Print high part of a DImode register, integer or address.
514     %L  Print low part of a DImode register, integer or address.
515     %N  Print the negation of the immediate value.
516     %P  Register used for PID addressing
517     %Q  If the operand is a MEM, then correctly generate
518         register indirect or register relative addressing.
519     %R  Like %Q but for zero-extending loads.  */
520
521static void
522rx_print_operand (FILE * file, rtx op, int letter)
523{
524  bool unsigned_load = false;
525  bool print_hash = true;
526
527  if (letter == 'A'
528      && ((GET_CODE (op) == CONST
529	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
530	  || GET_CODE (op) == UNSPEC))
531    {
532      print_hash = false;
533      letter = 0;
534    }
535
536  switch (letter)
537    {
538    case 'A':
539      /* Print an operand without a leading #.  */
540      if (MEM_P (op))
541	op = XEXP (op, 0);
542
543      switch (GET_CODE (op))
544	{
545	case LABEL_REF:
546	case SYMBOL_REF:
547	  output_addr_const (file, op);
548	  break;
549	case CONST_INT:
550	  fprintf (file, "%ld", (long) INTVAL (op));
551	  break;
552	default:
553	  rx_print_operand (file, op, 0);
554	  break;
555	}
556      break;
557
558    case 'B':
559      {
560	enum rtx_code code = GET_CODE (op);
561	machine_mode mode = GET_MODE (XEXP (op, 0));
562	const char *ret;
563
564	if (mode == CC_Fmode)
565	  {
566	    /* C flag is undefined, and O flag carries unordered.  None of the
567	       branch combinations that include O use it helpfully.  */
568	    switch (code)
569	      {
570	      case ORDERED:
571		ret = "no";
572		break;
573	      case UNORDERED:
574		ret = "o";
575		break;
576	      case LT:
577		ret = "n";
578		break;
579	      case GE:
580		ret = "pz";
581		break;
582	      case EQ:
583		ret = "eq";
584		break;
585	      case NE:
586		ret = "ne";
587		break;
588	      default:
589		gcc_unreachable ();
590	      }
591	  }
592	else
593	  {
594	    unsigned int flags = flags_from_mode (mode);
595
596	    switch (code)
597	      {
598	      case LT:
599		ret = (flags & CC_FLAG_O ? "lt" : "n");
600		break;
601	      case GE:
602		ret = (flags & CC_FLAG_O ? "ge" : "pz");
603		break;
604	      case GT:
605		ret = "gt";
606		break;
607	      case LE:
608		ret = "le";
609		break;
610	      case GEU:
611		ret = "geu";
612		break;
613	      case LTU:
614		ret = "ltu";
615		break;
616	      case GTU:
617		ret = "gtu";
618		break;
619	      case LEU:
620		ret = "leu";
621		break;
622	      case EQ:
623		ret = "eq";
624		break;
625	      case NE:
626		ret = "ne";
627		break;
628	      default:
629		gcc_unreachable ();
630	      }
631	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
632	  }
633	fputs (ret, file);
634	break;
635      }
636
637    case 'C':
638      gcc_assert (CONST_INT_P (op));
639      switch (INTVAL (op))
640	{
641	case CTRLREG_PSW:   fprintf (file, "psw"); break;
642	case CTRLREG_PC:    fprintf (file, "pc"); break;
643	case CTRLREG_USP:   fprintf (file, "usp"); break;
644	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
645	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
646	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
647	case CTRLREG_ISP:   fprintf (file, "isp"); break;
648	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
649	case CTRLREG_INTB:  fprintf (file, "intb"); break;
650	default:
651	  warning (0, "unrecognized control register number: %d"
652		   " - using %<psw%>", (int) INTVAL (op));
653	  fprintf (file, "psw");
654	  break;
655	}
656      break;
657
658    case 'F':
659      gcc_assert (CONST_INT_P (op));
660      switch (INTVAL (op))
661	{
662	case 0: case 'c': case 'C': fprintf (file, "C"); break;
663	case 1:	case 'z': case 'Z': fprintf (file, "Z"); break;
664	case 2: case 's': case 'S': fprintf (file, "S"); break;
665	case 3: case 'o': case 'O': fprintf (file, "O"); break;
666	case 8: case 'i': case 'I': fprintf (file, "I"); break;
667	case 9: case 'u': case 'U': fprintf (file, "U"); break;
668	default:
669	  gcc_unreachable ();
670	}
671      break;
672
673    case 'G':
674      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
675      break;
676
677    case 'H':
678      switch (GET_CODE (op))
679	{
680	case REG:
681	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
682	  break;
683	case CONST_INT:
684	  {
685	    HOST_WIDE_INT v = INTVAL (op);
686
687	    fprintf (file, "#");
688	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
689	    v = v >> 16;
690	    v = v >> 16;
691	    rx_print_integer (file, v);
692	    break;
693	  }
694	case CONST_DOUBLE:
695	  fprintf (file, "#");
696	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
697	  break;
698	case MEM:
699	  if (! WORDS_BIG_ENDIAN)
700	    op = adjust_address (op, SImode, 4);
701	  output_address (GET_MODE (op), XEXP (op, 0));
702	  break;
703	default:
704	  gcc_unreachable ();
705	}
706      break;
707
708    case 'L':
709      switch (GET_CODE (op))
710	{
711	case REG:
712	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
713	  break;
714	case CONST_INT:
715	  fprintf (file, "#");
716	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
717	  break;
718	case CONST_DOUBLE:
719	  fprintf (file, "#");
720	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
721	  break;
722	case MEM:
723	  if (WORDS_BIG_ENDIAN)
724	    op = adjust_address (op, SImode, 4);
725	  output_address (GET_MODE (op), XEXP (op, 0));
726	  break;
727	default:
728	  gcc_unreachable ();
729	}
730      break;
731
732    case 'N':
733      gcc_assert (CONST_INT_P (op));
734      fprintf (file, "#");
735      rx_print_integer (file, - INTVAL (op));
736      break;
737
738    case 'P':
739      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
740      break;
741
742    case 'R':
743      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
744      unsigned_load = true;
745      /* Fall through.  */
746    case 'Q':
747      if (MEM_P (op))
748	{
749	  HOST_WIDE_INT offset;
750	  rtx mem = op;
751
752	  op = XEXP (op, 0);
753
754	  if (REG_P (op))
755	    offset = 0;
756	  else if (GET_CODE (op) == PLUS)
757	    {
758	      rtx displacement;
759
760	      if (REG_P (XEXP (op, 0)))
761		{
762		  displacement = XEXP (op, 1);
763		  op = XEXP (op, 0);
764		}
765	      else
766		{
767		  displacement = XEXP (op, 0);
768		  op = XEXP (op, 1);
769		  gcc_assert (REG_P (op));
770		}
771
772	      gcc_assert (CONST_INT_P (displacement));
773	      offset = INTVAL (displacement);
774	      gcc_assert (offset >= 0);
775
776	      fprintf (file, "%ld", offset);
777	    }
778	  else
779	    gcc_unreachable ();
780
781	  fprintf (file, "[");
782	  rx_print_operand (file, op, 0);
783	  fprintf (file, "].");
784
785	  switch (GET_MODE_SIZE (GET_MODE (mem)))
786	    {
787	    case 1:
788	      gcc_assert (offset <= 65535 * 1);
789	      fprintf (file, unsigned_load ? "UB" : "B");
790	      break;
791	    case 2:
792	      gcc_assert (offset % 2 == 0);
793	      gcc_assert (offset <= 65535 * 2);
794	      fprintf (file, unsigned_load ? "UW" : "W");
795	      break;
796	    case 4:
797	      gcc_assert (offset % 4 == 0);
798	      gcc_assert (offset <= 65535 * 4);
799	      fprintf (file, "L");
800	      break;
801	    default:
802	      gcc_unreachable ();
803	    }
804	  break;
805	}
806
807      /* Fall through.  */
808
809    default:
810      if (GET_CODE (op) == CONST
811	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
812	op = XEXP (op, 0);
813      else if (GET_CODE (op) == CONST
814	       && GET_CODE (XEXP (op, 0)) == PLUS
815	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
816	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
817	{
818	  if (print_hash)
819	    fprintf (file, "#");
820	  fprintf (file, "(");
821	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
822	  fprintf (file, " + ");
823	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
824	  fprintf (file, ")");
825	  return;
826	}
827
828      switch (GET_CODE (op))
829	{
830	case MULT:
831	  /* Should be the scaled part of an
832	     indexed register indirect address.  */
833	  {
834	    rtx base = XEXP (op, 0);
835	    rtx index = XEXP (op, 1);
836
837	    /* Check for a swaped index register and scaling factor.
838	       Not sure if this can happen, but be prepared to handle it.  */
839	    if (CONST_INT_P (base) && REG_P (index))
840	      {
841		rtx tmp = base;
842		base = index;
843		index = tmp;
844	      }
845
846	    gcc_assert (REG_P (base));
847	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
848	    gcc_assert (CONST_INT_P (index));
849	    /* Do not try to verify the value of the scalar as it is based
850	       on the mode of the MEM not the mode of the MULT.  (Which
851	       will always be SImode).  */
852	    fprintf (file, "%s", reg_names [REGNO (base)]);
853	    break;
854	  }
855
856	case MEM:
857	  output_address (GET_MODE (op), XEXP (op, 0));
858	  break;
859
860	case PLUS:
861	  output_address (VOIDmode, op);
862	  break;
863
864	case REG:
865	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
866	  fprintf (file, "%s", reg_names [REGNO (op)]);
867	  break;
868
869	case SUBREG:
870	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
871	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
872	  break;
873
874	  /* This will only be single precision....  */
875	case CONST_DOUBLE:
876	  {
877	    unsigned long val;
878
879	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
880	    if (print_hash)
881	      fprintf (file, "#");
882	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
883	    break;
884	  }
885
886	case CONST_INT:
887	  if (print_hash)
888	    fprintf (file, "#");
889	  rx_print_integer (file, INTVAL (op));
890	  break;
891
892	case UNSPEC:
893	  switch (XINT (op, 1))
894	    {
895	    case UNSPEC_PID_ADDR:
896	      {
897		rtx sym, add;
898
899		if (print_hash)
900		  fprintf (file, "#");
901		sym = XVECEXP (op, 0, 0);
902		add = NULL_RTX;
903		fprintf (file, "(");
904		if (GET_CODE (sym) == PLUS)
905		  {
906		    add = XEXP (sym, 1);
907		    sym = XEXP (sym, 0);
908		  }
909		output_addr_const (file, sym);
910		if (add != NULL_RTX)
911		  {
912		    fprintf (file, "+");
913		    output_addr_const (file, add);
914		  }
915		fprintf (file, "-__pid_base");
916		fprintf (file, ")");
917		return;
918	      }
919	    }
920	  /* Fall through */
921
922	case CONST:
923	case SYMBOL_REF:
924	case LABEL_REF:
925	case CODE_LABEL:
926	  rx_print_operand_address (file, VOIDmode, op);
927	  break;
928
929	default:
930	  gcc_unreachable ();
931	}
932      break;
933    }
934}
935
936/* Maybe convert an operand into its PID format.  */
937
938rtx
939rx_maybe_pidify_operand (rtx op, int copy_to_reg)
940{
941  if (rx_pid_data_operand (op) == PID_UNENCODED)
942    {
943      if (GET_CODE (op) == MEM)
944	{
945	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
946	  op = replace_equiv_address (op, a);
947	}
948      else
949	{
950	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
951	}
952
953      if (copy_to_reg)
954	op = copy_to_mode_reg (GET_MODE (op), op);
955    }
956  return op;
957}
958
959/* Returns an assembler template for a move instruction.  */
960
961char *
962rx_gen_move_template (rtx * operands, bool is_movu)
963{
964  static char  out_template [64];
965  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
966  const char * src_template;
967  const char * dst_template;
968  rtx          dest = operands[0];
969  rtx          src  = operands[1];
970
971  /* Decide which extension, if any, should be given to the move instruction.  */
972  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
973    {
974    case E_QImode:
975      /* The .B extension is not valid when
976	 loading an immediate into a register.  */
977      if (! REG_P (dest) || ! CONST_INT_P (src))
978	extension = ".B";
979      break;
980    case E_HImode:
981      if (! REG_P (dest) || ! CONST_INT_P (src))
982	/* The .W extension is not valid when
983	   loading an immediate into a register.  */
984	extension = ".W";
985      break;
986    case E_DFmode:
987    case E_DImode:
988    case E_SFmode:
989    case E_SImode:
990      extension = ".L";
991      break;
992    case E_VOIDmode:
993      /* This mode is used by constants.  */
994      break;
995    default:
996      debug_rtx (src);
997      gcc_unreachable ();
998    }
999
1000  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
1001    {
1002      gcc_assert (GET_MODE (src) != DImode);
1003      gcc_assert (GET_MODE (src) != DFmode);
1004
1005      src_template = "(%A1 - __pid_base)[%P1]";
1006    }
1007  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1008    {
1009      gcc_assert (GET_MODE (src) != DImode);
1010      gcc_assert (GET_MODE (src) != DFmode);
1011
1012      src_template = "%%gp(%A1)[%G1]";
1013    }
1014  else
1015    src_template = "%1";
1016
1017  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
1018    {
1019      gcc_assert (GET_MODE (dest) != DImode);
1020      gcc_assert (GET_MODE (dest) != DFmode);
1021
1022      dst_template = "%%gp(%A0)[%G0]";
1023    }
1024  else
1025    dst_template = "%0";
1026
1027  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
1028    {
1029      gcc_assert (! is_movu);
1030
1031      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
1032	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
1033      else
1034	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1035    }
1036  else
1037    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1038	     extension, src_template, dst_template);
1039  return out_template;
1040}
1041
1042/* Return VALUE rounded up to the next ALIGNMENT boundary.  */
1043
1044static inline unsigned int
1045rx_round_up (unsigned int value, unsigned int alignment)
1046{
1047  alignment -= 1;
1048  return (value + alignment) & (~ alignment);
1049}
1050
1051/* Return the number of bytes in the argument registers
1052   occupied by an argument of type TYPE and mode MODE.  */
1053
1054static unsigned int
1055rx_function_arg_size (machine_mode mode, const_tree type)
1056{
1057  unsigned int num_bytes;
1058
1059  num_bytes = (mode == BLKmode)
1060    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1061  return rx_round_up (num_bytes, UNITS_PER_WORD);
1062}
1063
1064#define NUM_ARG_REGS		4
1065#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1066
1067/* Return an RTL expression describing the register holding function
1068   argument ARG or NULL_RTX if the parameter should be passed on the
1069   stack.  CUM describes the previous parameters to the function.  */
1070
1071static rtx
1072rx_function_arg (cumulative_args_t cum, const function_arg_info &arg)
1073{
1074  unsigned int next_reg;
1075  unsigned int bytes_so_far = *get_cumulative_args (cum);
1076  unsigned int size;
1077  unsigned int rounded_size;
1078
1079  size = arg.promoted_size_in_bytes ();
1080  /* If the size is not known it cannot be passed in registers.  */
1081  if (size < 1)
1082    return NULL_RTX;
1083
1084  rounded_size = rx_round_up (size, UNITS_PER_WORD);
1085
1086  /* Don't pass this arg via registers if there
1087     are insufficient registers to hold all of it.  */
1088  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1089    return NULL_RTX;
1090
1091  /* Unnamed arguments and the last named argument in a
1092     variadic function are always passed on the stack.  */
1093  if (!arg.named)
1094    return NULL_RTX;
1095
1096  /* Structures must occupy an exact number of registers,
1097     otherwise they are passed on the stack.  */
1098  if ((arg.type == NULL || AGGREGATE_TYPE_P (arg.type))
1099      && (size % UNITS_PER_WORD) != 0)
1100    return NULL_RTX;
1101
1102  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1103
1104  return gen_rtx_REG (arg.mode, next_reg);
1105}
1106
1107static void
1108rx_function_arg_advance (cumulative_args_t cum,
1109			 const function_arg_info &arg)
1110{
1111  *get_cumulative_args (cum) += rx_function_arg_size (arg.mode, arg.type);
1112}
1113
1114static unsigned int
1115rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1116			  const_tree type ATTRIBUTE_UNUSED)
1117{
1118  /* Older versions of the RX backend aligned all on-stack arguments
1119     to 32-bits.  The RX C ABI however says that they should be
1120     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
1121  if (TARGET_GCC_ABI)
1122    return STACK_BOUNDARY;
1123
1124  if (type)
1125    {
1126      if (DECL_P (type))
1127	return DECL_ALIGN (type);
1128      return TYPE_ALIGN (type);
1129    }
1130
1131  return PARM_BOUNDARY;
1132}
1133
1134/* Return an RTL describing where a function return value of type RET_TYPE
1135   is held.  */
1136
1137static rtx
1138rx_function_value (const_tree ret_type,
1139		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1140		   bool       outgoing ATTRIBUTE_UNUSED)
1141{
1142  machine_mode mode = TYPE_MODE (ret_type);
1143
1144  /* RX ABI specifies that small integer types are
1145     promoted to int when returned by a function.  */
1146  if (GET_MODE_SIZE (mode) > 0
1147      && GET_MODE_SIZE (mode) < 4
1148      && ! COMPLEX_MODE_P (mode)
1149      && ! VECTOR_TYPE_P (ret_type)
1150      && ! VECTOR_MODE_P (mode)
1151      )
1152    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1153
1154  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1155}
1156
1157/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1158   regard to function returns as does TARGET_FUNCTION_VALUE.  */
1159
1160static machine_mode
1161rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1162			  machine_mode mode,
1163			  int * punsignedp ATTRIBUTE_UNUSED,
1164			  const_tree funtype ATTRIBUTE_UNUSED,
1165			  int for_return)
1166{
1167  if (for_return != 1
1168      || GET_MODE_SIZE (mode) >= 4
1169      || COMPLEX_MODE_P (mode)
1170      || VECTOR_MODE_P (mode)
1171      || VECTOR_TYPE_P (type)
1172      || GET_MODE_SIZE (mode) < 1)
1173    return mode;
1174
1175  return SImode;
1176}
1177
1178static bool
1179rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1180{
1181  HOST_WIDE_INT size;
1182
1183  if (TYPE_MODE (type) != BLKmode
1184      && ! AGGREGATE_TYPE_P (type))
1185    return false;
1186
1187  size = int_size_in_bytes (type);
1188  /* Large structs and those whose size is not an
1189     exact multiple of 4 are returned in memory.  */
1190  return size < 1
1191    || size > 16
1192    || (size % UNITS_PER_WORD) != 0;
1193}
1194
1195static rtx
1196rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1197		     int incoming ATTRIBUTE_UNUSED)
1198{
1199  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1200}
1201
1202static bool
1203rx_return_in_msb (const_tree valtype)
1204{
1205  return TARGET_BIG_ENDIAN_DATA
1206    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1207}
1208
1209/* Returns true if the provided function has the specified attribute.  */
1210
1211static inline bool
1212has_func_attr (const_tree decl, const char * func_attr)
1213{
1214  if (decl == NULL_TREE)
1215    decl = current_function_decl;
1216
1217  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1218}
1219
1220/* Returns true if the provided function has the "fast_interrupt" attribute.  */
1221
1222bool
1223is_fast_interrupt_func (const_tree decl)
1224{
1225  return has_func_attr (decl, "fast_interrupt");
1226}
1227
1228/* Returns true if the provided function has the "interrupt" attribute.  */
1229
1230bool
1231is_interrupt_func (const_tree decl)
1232{
1233  return has_func_attr (decl, "interrupt");
1234}
1235
1236/* Returns true if the provided function has the "naked" attribute.  */
1237
1238static inline bool
1239is_naked_func (const_tree decl)
1240{
1241  return has_func_attr (decl, "naked");
1242}
1243
1244static bool use_fixed_regs = false;
1245
1246static void
1247rx_conditional_register_usage (void)
1248{
1249  static bool using_fixed_regs = false;
1250
1251  if (TARGET_PID)
1252    {
1253      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1254      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1255    }
1256
1257  if (rx_small_data_limit > 0)
1258    {
1259      if (TARGET_PID)
1260	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1261      else
1262	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1263
1264      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1265    }
1266
1267  if (use_fixed_regs != using_fixed_regs)
1268    {
1269      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1270      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1271
1272      if (use_fixed_regs)
1273	{
1274	  unsigned int r;
1275
1276	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1277	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1278
1279	  /* This is for fast interrupt handlers.  Any register in
1280	     the range r10 to r13 (inclusive) that is currently
1281	     marked as fixed is now a viable, call-used register.  */
1282	  for (r = 10; r <= 13; r++)
1283	    if (fixed_regs[r])
1284	      {
1285		fixed_regs[r] = 0;
1286		call_used_regs[r] = 1;
1287	      }
1288
1289	  /* Mark r7 as fixed.  This is just a hack to avoid
1290	     altering the reg_alloc_order array so that the newly
1291	     freed r10-r13 registers are the preferred registers.  */
1292	  fixed_regs[7] = call_used_regs[7] = 1;
1293	}
1294      else
1295	{
1296	  /* Restore the normal register masks.  */
1297	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1298	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1299	}
1300
1301      using_fixed_regs = use_fixed_regs;
1302    }
1303}
1304
1305struct decl_chain
1306{
1307  tree fndecl;
1308  struct decl_chain * next;
1309};
1310
1311/* Stack of decls for which we have issued warnings.  */
1312static struct decl_chain * warned_decls = NULL;
1313
1314static void
1315add_warned_decl (tree fndecl)
1316{
1317  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1318
1319  warned->fndecl = fndecl;
1320  warned->next = warned_decls;
1321  warned_decls = warned;
1322}
1323
1324/* Returns TRUE if FNDECL is on our list of warned about decls.  */
1325
1326static bool
1327already_warned (tree fndecl)
1328{
1329  struct decl_chain * warned;
1330
1331  for (warned = warned_decls;
1332       warned != NULL;
1333       warned = warned->next)
1334    if (warned->fndecl == fndecl)
1335      return true;
1336
1337  return false;
1338}
1339
1340/* Perform any actions necessary before starting to compile FNDECL.
1341   For the RX we use this to make sure that we have the correct
1342   set of register masks selected.  If FNDECL is NULL then we are
1343   compiling top level things.  */
1344
1345static void
1346rx_set_current_function (tree fndecl)
1347{
1348  /* Remember the last target of rx_set_current_function.  */
1349  static tree rx_previous_fndecl;
1350  bool prev_was_fast_interrupt;
1351  bool current_is_fast_interrupt;
1352
1353  /* Only change the context if the function changes.  This hook is called
1354     several times in the course of compiling a function, and we don't want
1355     to slow things down too much or call target_reinit when it isn't safe.  */
1356  if (fndecl == rx_previous_fndecl)
1357    return;
1358
1359  prev_was_fast_interrupt
1360    = rx_previous_fndecl
1361    ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1362
1363  current_is_fast_interrupt
1364    = fndecl ? is_fast_interrupt_func (fndecl) : false;
1365
1366  if (prev_was_fast_interrupt != current_is_fast_interrupt)
1367    {
1368      use_fixed_regs = current_is_fast_interrupt;
1369      target_reinit ();
1370    }
1371
1372  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
1373    {
1374      /* We do not warn about the first fast interrupt routine that
1375	 we see.  Instead we just push it onto the stack.  */
1376      if (warned_decls == NULL)
1377	add_warned_decl (fndecl);
1378
1379      /* Otherwise if this fast interrupt is one for which we have
1380	 not already issued a warning, generate one and then push
1381	 it onto the stack as well.  */
1382      else if (! already_warned (fndecl))
1383	{
1384	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
1385		   fndecl, warned_decls->fndecl);
1386	  add_warned_decl (fndecl);
1387	}
1388    }
1389
1390  rx_previous_fndecl = fndecl;
1391}
1392
1393/* Typical stack layout should looks like this after the function's prologue:
1394
1395                            |    |
1396                              --                       ^
1397                            |    | \                   |
1398                            |    |   arguments saved   | Increasing
1399                            |    |   on the stack      |  addresses
1400    PARENT   arg pointer -> |    | /
1401  -------------------------- ---- -------------------
1402    CHILD                   |ret |   return address
1403                              --
1404                            |    | \
1405                            |    |   call saved
1406                            |    |   registers
1407			    |    | /
1408                              --
1409                            |    | \
1410                            |    |   local
1411                            |    |   variables
1412        frame pointer ->    |    | /
1413                              --
1414                            |    | \
1415                            |    |   outgoing          | Decreasing
1416                            |    |   arguments         |  addresses
1417   current stack pointer -> |    | /                   |
1418  -------------------------- ---- ------------------   V
1419                            |    |                 */
1420
1421static unsigned int
1422bit_count (unsigned int x)
1423{
1424  const unsigned int m1 = 0x55555555;
1425  const unsigned int m2 = 0x33333333;
1426  const unsigned int m4 = 0x0f0f0f0f;
1427
1428  x -= (x >> 1) & m1;
1429  x = (x & m2) + ((x >> 2) & m2);
1430  x = (x + (x >> 4)) & m4;
1431  x += x >>  8;
1432
1433  return (x + (x >> 16)) & 0x3f;
1434}
1435
1436#if defined(TARGET_SAVE_ACC_REGISTER)
1437#define MUST_SAVE_ACC_REGISTER			\
1438  (TARGET_SAVE_ACC_REGISTER			\
1439   && (is_interrupt_func (NULL_TREE)		\
1440       || is_fast_interrupt_func (NULL_TREE)))
1441#else
1442#define MUST_SAVE_ACC_REGISTER 0
1443#endif
1444
1445/* Returns either the lowest numbered and highest numbered registers that
1446   occupy the call-saved area of the stack frame, if the registers are
1447   stored as a contiguous block, or else a bitmask of the individual
1448   registers if they are stored piecemeal.
1449
1450   Also computes the size of the frame and the size of the outgoing
1451   arguments block (in bytes).  */
1452
1453static void
1454rx_get_stack_layout (unsigned int * lowest,
1455		     unsigned int * highest,
1456		     unsigned int * register_mask,
1457		     unsigned int * frame_size,
1458		     unsigned int * stack_size)
1459{
1460  unsigned int reg;
1461  unsigned int low;
1462  unsigned int high;
1463  unsigned int fixed_reg = 0;
1464  unsigned int save_mask;
1465  unsigned int pushed_mask;
1466  unsigned int unneeded_pushes;
1467
1468  if (is_naked_func (NULL_TREE))
1469    {
1470      /* Naked functions do not create their own stack frame.
1471	 Instead the programmer must do that for us.  */
1472      * lowest = 0;
1473      * highest = 0;
1474      * register_mask = 0;
1475      * frame_size = 0;
1476      * stack_size = 0;
1477      return;
1478    }
1479
1480  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1481    {
1482      if ((df_regs_ever_live_p (reg)
1483	   /* Always save all call clobbered registers inside non-leaf
1484	      interrupt handlers, even if they are not live - they may
1485	      be used in (non-interrupt aware) routines called from this one.  */
1486	   || (call_used_or_fixed_reg_p (reg)
1487	       && is_interrupt_func (NULL_TREE)
1488	       && ! crtl->is_leaf))
1489	  && (! call_used_or_fixed_reg_p (reg)
1490	      /* Even call clobbered registered must
1491		 be pushed inside interrupt handlers.  */
1492	      || is_interrupt_func (NULL_TREE)
1493	      /* Likewise for fast interrupt handlers, except registers r10 -
1494		 r13.  These are normally call-saved, but may have been set
1495		 to call-used by rx_conditional_register_usage.  If so then
1496		 they can be used in the fast interrupt handler without
1497		 saving them on the stack.  */
1498	      || (is_fast_interrupt_func (NULL_TREE)
1499		  && ! IN_RANGE (reg, 10, 13))))
1500	{
1501	  if (low == 0)
1502	    low = reg;
1503	  high = reg;
1504
1505	  save_mask |= 1 << reg;
1506	}
1507
1508      /* Remember if we see a fixed register
1509	 after having found the low register.  */
1510      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1511	fixed_reg = reg;
1512    }
1513
1514  /* If we have to save the accumulator register, make sure
1515     that at least two registers are pushed into the frame.  */
1516  if (MUST_SAVE_ACC_REGISTER
1517      && bit_count (save_mask) < 2)
1518    {
1519      save_mask |= (1 << 13) | (1 << 14);
1520      if (low == 0)
1521	low = 13;
1522      if (high == 0 || low == high)
1523	high = low + 1;
1524    }
1525
1526  /* Decide if it would be faster fill in the call-saved area of the stack
1527     frame using multiple PUSH instructions instead of a single PUSHM
1528     instruction.
1529
1530     SAVE_MASK is a bitmask of the registers that must be stored in the
1531     call-save area.  PUSHED_MASK is a bitmask of the registers that would
1532     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
1533     is a bitmask of those registers in pushed_mask that are not in
1534     save_mask.
1535
1536     We use a simple heuristic that says that it is better to use
1537     multiple PUSH instructions if the number of unnecessary pushes is
1538     greater than the number of necessary pushes.
1539
1540     We also use multiple PUSH instructions if there are any fixed registers
1541     between LOW and HIGH.  The only way that this can happen is if the user
1542     has specified --fixed-<reg-name> on the command line and in such
1543     circumstances we do not want to touch the fixed registers at all.
1544
1545     Note also that the code in the prologue/epilogue handlers will
1546     automatically merge multiple PUSHes of adjacent registers into a single
1547     PUSHM.
1548
1549     FIXME: Is it worth improving this heuristic ?  */
1550  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
1551  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1552
1553  if ((fixed_reg && fixed_reg <= high)
1554      || (optimize_function_for_speed_p (cfun)
1555	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
1556    {
1557      /* Use multiple pushes.  */
1558      * lowest = 0;
1559      * highest = 0;
1560      * register_mask = save_mask;
1561    }
1562  else
1563    {
1564      /* Use one push multiple instruction.  */
1565      * lowest = low;
1566      * highest = high;
1567      * register_mask = 0;
1568    }
1569
1570  * frame_size = rx_round_up
1571    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1572
1573  if (crtl->args.size > 0)
1574    * frame_size += rx_round_up
1575      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1576
1577  * stack_size = rx_round_up
1578    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1579}
1580
1581/* Generate a PUSHM instruction that matches the given operands.  */
1582
1583void
1584rx_emit_stack_pushm (rtx * operands)
1585{
1586  HOST_WIDE_INT last_reg;
1587  rtx first_push;
1588
1589  gcc_assert (CONST_INT_P (operands[0]));
1590  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1591
1592  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1593  first_push = XVECEXP (operands[1], 0, 1);
1594  gcc_assert (SET_P (first_push));
1595  first_push = SET_SRC (first_push);
1596  gcc_assert (REG_P (first_push));
1597
1598  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1599	       reg_names [REGNO (first_push) - last_reg],
1600	       reg_names [REGNO (first_push)]);
1601}
1602
1603/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */
1604
1605static rtx
1606gen_rx_store_vector (unsigned int low, unsigned int high)
1607{
1608  unsigned int i;
1609  unsigned int count = (high - low) + 2;
1610  rtx vector;
1611
1612  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1613
1614  XVECEXP (vector, 0, 0) =
1615    gen_rtx_SET (stack_pointer_rtx,
1616		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1617				GEN_INT ((count - 1) * UNITS_PER_WORD)));
1618
1619  for (i = 0; i < count - 1; i++)
1620    XVECEXP (vector, 0, i + 1) =
1621      gen_rtx_SET (gen_rtx_MEM (SImode,
1622				gen_rtx_MINUS (SImode, stack_pointer_rtx,
1623					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
1624		   gen_rtx_REG (SImode, high - i));
1625  return vector;
1626}
1627
1628/* Mark INSN as being frame related.  If it is a PARALLEL
1629   then mark each element as being frame related as well.  */
1630
1631static void
1632mark_frame_related (rtx insn)
1633{
1634  RTX_FRAME_RELATED_P (insn) = 1;
1635  insn = PATTERN (insn);
1636
1637  if (GET_CODE (insn) == PARALLEL)
1638    {
1639      unsigned int i;
1640
1641      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1642	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1643    }
1644}
1645
1646/* Create CFI notes for register pops.  */
1647static void
1648add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
1649{
1650  rtx t = plus_constant (Pmode, stack_pointer_rtx,
1651                        (high - low + 1) * UNITS_PER_WORD);
1652  t = gen_rtx_SET (stack_pointer_rtx, t);
1653  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
1654  RTX_FRAME_RELATED_P (insn) = 1;
1655  for (unsigned int i = low; i <= high; i++)
1656    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
1657}
1658
1659
1660static bool
1661ok_for_max_constant (HOST_WIDE_INT val)
1662{
1663  if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
1664    /* If there is no constraint on the size of constants
1665       used as operands, then any value is legitimate.  */
1666    return true;
1667
1668  /* rx_max_constant_size specifies the maximum number
1669     of bytes that can be used to hold a signed value.  */
1670  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
1671		        ( 1 << (rx_max_constant_size * 8)));
1672}
1673
1674/* Generate an ADD of SRC plus VAL into DEST.
1675   Handles the case where VAL is too big for max_constant_value.
1676   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */
1677
1678static void
1679gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1680{
1681  rtx insn;
1682
1683  if (val == NULL_RTX || INTVAL (val) == 0)
1684    {
1685      gcc_assert (dest != src);
1686
1687      insn = emit_move_insn (dest, src);
1688    }
1689  else if (ok_for_max_constant (INTVAL (val)))
1690    insn = emit_insn (gen_addsi3 (dest, src, val));
1691  else
1692    {
1693      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1694	 will not reject it.  */
1695      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1696      insn = emit_insn (gen_addsi3 (dest, src, val));
1697
1698      if (is_frame_related)
1699	/* We have to provide our own frame related note here
1700	   as the dwarf2out code cannot be expected to grok
1701	   our unspec.  */
1702	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1703		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
1704      return;
1705    }
1706
1707  if (is_frame_related)
1708    RTX_FRAME_RELATED_P (insn) = 1;
1709}
1710
1711static void
1712push_regs (unsigned int high, unsigned int low)
1713{
1714  rtx insn;
1715
1716  if (low == high)
1717    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1718  else
1719    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1720				       gen_rx_store_vector (low, high)));
1721  mark_frame_related (insn);
1722}
1723
1724void
1725rx_expand_prologue (void)
1726{
1727  unsigned int stack_size;
1728  unsigned int frame_size;
1729  unsigned int mask;
1730  unsigned int low;
1731  unsigned int high;
1732  unsigned int reg;
1733
1734  /* Naked functions use their own, programmer provided prologues.  */
1735  if (is_naked_func (NULL_TREE))
1736    return;
1737
1738  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1739
1740  if (flag_stack_usage_info)
1741    current_function_static_stack_size = frame_size + stack_size;
1742
1743  /* If we use any of the callee-saved registers, save them now.  */
1744  if (mask)
1745    {
1746      /* Push registers in reverse order.  */
1747      for (reg = CC_REGNUM; reg --;)
1748	if (mask & (1 << reg))
1749	  {
1750	    low = high = reg;
1751
1752	    /* Look for a span of registers.
1753	       Note - we do not have to worry about -Os and whether
1754	       it is better to use a single, longer PUSHM as
1755	       rx_get_stack_layout has already done that for us.  */
1756	    while (reg-- > 0)
1757	      if ((mask & (1 << reg)) == 0)
1758		break;
1759	      else
1760		--low;
1761
1762	    push_regs (high, low);
1763	    if (reg == (unsigned) -1)
1764	      break;
1765	  }
1766    }
1767  else if (low)
1768    push_regs (high, low);
1769
1770  if (MUST_SAVE_ACC_REGISTER)
1771    {
1772      unsigned int acc_high, acc_low;
1773
1774      /* Interrupt handlers have to preserve the accumulator
1775	 register if so requested by the user.  Use the first
1776         two pushed registers as intermediaries.  */
1777      if (mask)
1778	{
1779	  acc_low = acc_high = 0;
1780
1781	  for (reg = 1; reg < CC_REGNUM; reg ++)
1782	    if (mask & (1 << reg))
1783	      {
1784		if (acc_low == 0)
1785		  acc_low = reg;
1786		else
1787		  {
1788		    acc_high = reg;
1789		    break;
1790		  }
1791	      }
1792
1793	  /* We have assumed that there are at least two registers pushed... */
1794	  gcc_assert (acc_high != 0);
1795
1796	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
1797	     We just assume that they are zero.  */
1798	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1799	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1800	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1801	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1802	}
1803      else
1804	{
1805	  acc_low = low;
1806	  acc_high = low + 1;
1807
1808	  /* We have assumed that there are at least two registers pushed... */
1809	  gcc_assert (acc_high <= high);
1810
1811	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1812	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1813	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1814				      gen_rx_store_vector (acc_low, acc_high)));
1815	}
1816    }
1817
1818  /* If needed, set up the frame pointer.  */
1819  if (frame_pointer_needed)
1820    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1821		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1822
1823  /* Allocate space for the outgoing args.
1824     If the stack frame has not already been set up then handle this as well.  */
1825  if (stack_size)
1826    {
1827      if (frame_size)
1828	{
1829	  if (frame_pointer_needed)
1830	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1831			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1832	  else
1833	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1834			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1835			  true);
1836	}
1837      else
1838	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1839		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1840    }
1841  else if (frame_size)
1842    {
1843      if (! frame_pointer_needed)
1844	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1845		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1846      else
1847	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1848		      false /* False because the epilogue will use the FP not the SP.  */);
1849    }
1850}
1851
1852static void
1853add_vector_labels (FILE *file, const char *aname)
1854{
1855  tree vec_attr;
1856  tree val_attr;
1857  const char *vname = "vect";
1858  const char *s;
1859  int vnum;
1860
1861  /* This node is for the vector/interrupt tag itself */
1862  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1863  if (!vec_attr)
1864    return;
1865
1866  /* Now point it at the first argument */
1867  vec_attr = TREE_VALUE (vec_attr);
1868
1869  /* Iterate through the arguments.  */
1870  while (vec_attr)
1871    {
1872      val_attr = TREE_VALUE (vec_attr);
1873      switch (TREE_CODE (val_attr))
1874	{
1875	case STRING_CST:
1876	  s = TREE_STRING_POINTER (val_attr);
1877	  goto string_id_common;
1878
1879	case IDENTIFIER_NODE:
1880	  s = IDENTIFIER_POINTER (val_attr);
1881
1882	string_id_common:
1883	  if (strcmp (s, "$default") == 0)
1884	    {
1885	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1886	      fprintf (file, "$tableentry$default$%s:\n", vname);
1887	    }
1888	  else
1889	    vname = s;
1890	  break;
1891
1892	case INTEGER_CST:
1893	  vnum = TREE_INT_CST_LOW (val_attr);
1894
1895	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1896	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1897	  break;
1898
1899	default:
1900	  ;
1901	}
1902
1903      vec_attr = TREE_CHAIN (vec_attr);
1904    }
1905
1906}
1907
1908static void
1909rx_output_function_prologue (FILE * file)
1910{
1911  add_vector_labels (file, "interrupt");
1912  add_vector_labels (file, "vector");
1913
1914  if (is_fast_interrupt_func (NULL_TREE))
1915    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1916
1917  if (is_interrupt_func (NULL_TREE))
1918    asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1919
1920  if (is_naked_func (NULL_TREE))
1921    asm_fprintf (file, "\t; Note: Naked Function\n");
1922
1923  if (cfun->static_chain_decl != NULL)
1924    asm_fprintf (file, "\t; Note: Nested function declared "
1925		 "inside another function.\n");
1926
1927  if (crtl->calls_eh_return)
1928    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1929}
1930
1931/* Generate a POPM or RTSD instruction that matches the given operands.  */
1932
1933void
1934rx_emit_stack_popm (rtx * operands, bool is_popm)
1935{
1936  HOST_WIDE_INT stack_adjust;
1937  HOST_WIDE_INT last_reg;
1938  rtx first_push;
1939
1940  gcc_assert (CONST_INT_P (operands[0]));
1941  stack_adjust = INTVAL (operands[0]);
1942
1943  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1944  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1945
1946  first_push = XVECEXP (operands[1], 0, 1);
1947  gcc_assert (SET_P (first_push));
1948  first_push = SET_DEST (first_push);
1949  gcc_assert (REG_P (first_push));
1950
1951  if (is_popm)
1952    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1953		 reg_names [REGNO (first_push)],
1954		 reg_names [REGNO (first_push) + last_reg]);
1955  else
1956    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1957		 (int) stack_adjust,
1958		 reg_names [REGNO (first_push)],
1959		 reg_names [REGNO (first_push) + last_reg]);
1960}
1961
1962/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */
1963
1964static rtx
1965gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1966{
1967  unsigned int i;
1968  unsigned int bias = 3;
1969  unsigned int count = (high - low) + bias;
1970  rtx vector;
1971
1972  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1973
1974  XVECEXP (vector, 0, 0) =
1975    gen_rtx_SET (stack_pointer_rtx,
1976		 plus_constant (Pmode, stack_pointer_rtx, adjust));
1977
1978  for (i = 0; i < count - 2; i++)
1979    XVECEXP (vector, 0, i + 1) =
1980      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1981		   gen_rtx_MEM (SImode,
1982				i == 0 ? stack_pointer_rtx
1983				: plus_constant (Pmode, stack_pointer_rtx,
1984						 i * UNITS_PER_WORD)));
1985
1986  XVECEXP (vector, 0, count - 1) = ret_rtx;
1987
1988  return vector;
1989}
1990
1991/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */
1992
1993static rtx
1994gen_rx_popm_vector (unsigned int low, unsigned int high)
1995{
1996  unsigned int i;
1997  unsigned int count = (high - low) + 2;
1998  rtx vector;
1999
2000  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2001
2002  XVECEXP (vector, 0, 0) =
2003    gen_rtx_SET (stack_pointer_rtx,
2004		 plus_constant (Pmode, stack_pointer_rtx,
2005				(count - 1) * UNITS_PER_WORD));
2006
2007  for (i = 0; i < count - 1; i++)
2008    XVECEXP (vector, 0, i + 1) =
2009      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
2010		   gen_rtx_MEM (SImode,
2011				i == 0 ? stack_pointer_rtx
2012				: plus_constant (Pmode, stack_pointer_rtx,
2013						 i * UNITS_PER_WORD)));
2014
2015  return vector;
2016}
2017
2018/* Returns true if a simple return insn can be used.  */
2019
2020bool
2021rx_can_use_simple_return (void)
2022{
2023  unsigned int low;
2024  unsigned int high;
2025  unsigned int frame_size;
2026  unsigned int stack_size;
2027  unsigned int register_mask;
2028
2029  if (is_naked_func (NULL_TREE)
2030      || is_fast_interrupt_func (NULL_TREE)
2031      || is_interrupt_func (NULL_TREE))
2032    return false;
2033
2034  rx_get_stack_layout (& low, & high, & register_mask,
2035		       & frame_size, & stack_size);
2036
2037  return (register_mask == 0
2038	  && (frame_size + stack_size) == 0
2039	  && low == 0);
2040}
2041
2042static void
2043pop_regs (unsigned int high, unsigned int low)
2044{
2045  rtx_insn *insn;
2046  if (high == low)
2047    insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2048  else
2049    insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
2050						* UNITS_PER_WORD),
2051				      gen_rx_popm_vector (low, high)));
2052  add_pop_cfi_notes (insn, high, low);
2053}
2054
2055void
2056rx_expand_epilogue (bool is_sibcall)
2057{
2058  unsigned int low;
2059  unsigned int high;
2060  unsigned int frame_size;
2061  unsigned int stack_size;
2062  unsigned int register_mask;
2063  unsigned int regs_size;
2064  unsigned int reg;
2065  unsigned HOST_WIDE_INT total_size;
2066
2067  /* FIXME: We do not support indirect sibcalls at the moment becaause we
2068     cannot guarantee that the register holding the function address is a
2069     call-used register.  If it is a call-saved register then the stack
2070     pop instructions generated in the epilogue will corrupt the address
2071     before it is used.
2072
2073     Creating a new call-used-only register class works but then the
2074     reload pass gets stuck because it cannot always find a call-used
2075     register for spilling sibcalls.
2076
2077     The other possible solution is for this pass to scan forward for the
2078     sibcall instruction (if it has been generated) and work out if it
2079     is an indirect sibcall using a call-saved register.  If it is then
2080     the address can copied into a call-used register in this epilogue
2081     code and the sibcall instruction modified to use that register.  */
2082
2083  if (is_naked_func (NULL_TREE))
2084    {
2085      gcc_assert (! is_sibcall);
2086
2087      /* Naked functions use their own, programmer provided epilogues.
2088	 But, in order to keep gcc happy we have to generate some kind of
2089	 epilogue RTL.  */
2090      emit_jump_insn (gen_naked_return ());
2091      return;
2092    }
2093
2094  rx_get_stack_layout (& low, & high, & register_mask,
2095		       & frame_size, & stack_size);
2096
2097  total_size = frame_size + stack_size;
2098  regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2099
2100  /* See if we are unable to use the special stack frame deconstruct and
2101     return instructions.  In most cases we can use them, but the exceptions
2102     are:
2103
2104     - Sibling calling functions deconstruct the frame but do not return to
2105       their caller.  Instead they branch to their sibling and allow their
2106       return instruction to return to this function's parent.
2107
2108     - Fast and normal interrupt handling functions have to use special
2109       return instructions.
2110
2111     - Functions where we have pushed a fragmented set of registers into the
2112       call-save area must have the same set of registers popped.  */
2113  if (is_sibcall
2114      || is_fast_interrupt_func (NULL_TREE)
2115      || is_interrupt_func (NULL_TREE)
2116      || register_mask)
2117    {
2118      /* Cannot use the special instructions - deconstruct by hand.  */
2119      if (total_size)
2120	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2121		      GEN_INT (total_size), false);
2122
2123      if (MUST_SAVE_ACC_REGISTER)
2124	{
2125	  unsigned int acc_low, acc_high;
2126
2127	  /* Reverse the saving of the accumulator register onto the stack.
2128	     Note we must adjust the saved "low" accumulator value as it
2129	     is really the middle 32-bits of the accumulator.  */
2130	  if (register_mask)
2131	    {
2132	      acc_low = acc_high = 0;
2133
2134	      for (reg = 1; reg < CC_REGNUM; reg ++)
2135		if (register_mask & (1 << reg))
2136		  {
2137		    if (acc_low == 0)
2138		      acc_low = reg;
2139		    else
2140		      {
2141			acc_high = reg;
2142			break;
2143		      }
2144		  }
2145	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2146	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2147	    }
2148	  else
2149	    {
2150	      acc_low = low;
2151	      acc_high = low + 1;
2152	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2153					 gen_rx_popm_vector (acc_low, acc_high)));
2154	    }
2155
2156	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2157				  gen_rtx_REG (SImode, acc_low),
2158				  GEN_INT (16)));
2159	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2160	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2161	}
2162
2163      if (register_mask)
2164	{
2165	  for (reg = 0; reg < CC_REGNUM; reg ++)
2166	    if (register_mask & (1 << reg))
2167	      {
2168		low = high = reg;
2169		while (register_mask & (1 << high))
2170		  high ++;
2171		pop_regs (high - 1, low);
2172		reg = high;
2173	      }
2174	}
2175      else if (low)
2176	pop_regs (high, low);
2177
2178      if (is_fast_interrupt_func (NULL_TREE))
2179	{
2180	  gcc_assert (! is_sibcall);
2181	  emit_jump_insn (gen_fast_interrupt_return ());
2182	}
2183      else if (is_interrupt_func (NULL_TREE))
2184	{
2185	  gcc_assert (! is_sibcall);
2186	  emit_jump_insn (gen_exception_return ());
2187	}
2188      else if (! is_sibcall)
2189	emit_jump_insn (gen_simple_return ());
2190
2191      return;
2192    }
2193
2194  /* If we allocated space on the stack, free it now.  */
2195  if (total_size)
2196    {
2197      unsigned HOST_WIDE_INT rtsd_size;
2198
2199      /* See if we can use the RTSD instruction.  */
2200      rtsd_size = total_size + regs_size;
2201      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2202	{
2203	  if (low)
2204	    emit_jump_insn (gen_pop_and_return
2205			    (GEN_INT (rtsd_size),
2206			     gen_rx_rtsd_vector (rtsd_size, low, high)));
2207	  else
2208	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2209
2210	  return;
2211	}
2212
2213      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2214		    GEN_INT (total_size), false);
2215    }
2216
2217  if (low)
2218    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2219					gen_rx_rtsd_vector (regs_size,
2220							    low, high)));
2221  else
2222    emit_jump_insn (gen_simple_return ());
2223}
2224
2225
2226/* Compute the offset (in words) between FROM (arg pointer
2227   or frame pointer) and TO (frame pointer or stack pointer).
2228   See ASCII art comment at the start of rx_expand_prologue
2229   for more information.  */
2230
2231int
2232rx_initial_elimination_offset (int from, int to)
2233{
2234  unsigned int low;
2235  unsigned int high;
2236  unsigned int frame_size;
2237  unsigned int stack_size;
2238  unsigned int mask;
2239
2240  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2241
2242  if (from == ARG_POINTER_REGNUM)
2243    {
2244      /* Extend the computed size of the stack frame to
2245	 include the registers pushed in the prologue.  */
2246      if (low)
2247	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2248      else
2249	frame_size += bit_count (mask) * UNITS_PER_WORD;
2250
2251      /* Remember to include the return address.  */
2252      frame_size += 1 * UNITS_PER_WORD;
2253
2254      if (to == FRAME_POINTER_REGNUM)
2255	return frame_size;
2256
2257      gcc_assert (to == STACK_POINTER_REGNUM);
2258      return frame_size + stack_size;
2259    }
2260
2261  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2262  return stack_size;
2263}
2264
2265/* Decide if a variable should go into one of the small data sections.  */
2266
2267static bool
2268rx_in_small_data (const_tree decl)
2269{
2270  int size;
2271  const char * section;
2272
2273  if (rx_small_data_limit == 0)
2274    return false;
2275
2276  if (TREE_CODE (decl) != VAR_DECL)
2277    return false;
2278
2279  /* We do not put read-only variables into a small data area because
2280     they would be placed with the other read-only sections, far away
2281     from the read-write data sections, and we only have one small
2282     data area pointer.
2283     Similarly commons are placed in the .bss section which might be
2284     far away (and out of alignment with respect to) the .data section.  */
2285  if (TREE_READONLY (decl) || DECL_COMMON (decl))
2286    return false;
2287
2288  section = DECL_SECTION_NAME (decl);
2289  if (section)
2290    return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2291
2292  size = int_size_in_bytes (TREE_TYPE (decl));
2293
2294  return (size > 0) && (size <= rx_small_data_limit);
2295}
2296
2297/* Return a section for X.
2298   The only special thing we do here is to honor small data.  */
2299
2300static section *
2301rx_select_rtx_section (machine_mode mode,
2302		       rtx x,
2303		       unsigned HOST_WIDE_INT align)
2304{
2305  if (rx_small_data_limit > 0
2306      && GET_MODE_SIZE (mode) <= rx_small_data_limit
2307      && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2308    return sdata_section;
2309
2310  return default_elf_select_rtx_section (mode, x, align);
2311}
2312
2313static section *
2314rx_select_section (tree decl,
2315		   int reloc,
2316		   unsigned HOST_WIDE_INT align)
2317{
2318  if (rx_small_data_limit > 0)
2319    {
2320      switch (categorize_decl_for_section (decl, reloc))
2321	{
2322	case SECCAT_SDATA:	return sdata_section;
2323	case SECCAT_SBSS:	return sbss_section;
2324	case SECCAT_SRODATA:
2325	  /* Fall through.  We do not put small, read only
2326	     data into the C_2 section because we are not
2327	     using the C_2 section.  We do not use the C_2
2328	     section because it is located with the other
2329	     read-only data sections, far away from the read-write
2330	     data sections and we only have one small data
2331	     pointer (r13).  */
2332	default:
2333	  break;
2334	}
2335    }
2336
2337  /* If we are supporting the Renesas assembler
2338     we cannot use mergeable sections.  */
2339  if (TARGET_AS100_SYNTAX)
2340    switch (categorize_decl_for_section (decl, reloc))
2341      {
2342      case SECCAT_RODATA_MERGE_CONST:
2343      case SECCAT_RODATA_MERGE_STR_INIT:
2344      case SECCAT_RODATA_MERGE_STR:
2345	return readonly_data_section;
2346
2347      default:
2348	break;
2349      }
2350
2351  return default_elf_select_section (decl, reloc, align);
2352}
2353
2354enum rx_builtin
2355{
2356  RX_BUILTIN_BRK,
2357  RX_BUILTIN_CLRPSW,
2358  RX_BUILTIN_INT,
2359  RX_BUILTIN_MACHI,
2360  RX_BUILTIN_MACLO,
2361  RX_BUILTIN_MULHI,
2362  RX_BUILTIN_MULLO,
2363  RX_BUILTIN_MVFACHI,
2364  RX_BUILTIN_MVFACMI,
2365  RX_BUILTIN_MVFC,
2366  RX_BUILTIN_MVTACHI,
2367  RX_BUILTIN_MVTACLO,
2368  RX_BUILTIN_MVTC,
2369  RX_BUILTIN_MVTIPL,
2370  RX_BUILTIN_RACW,
2371  RX_BUILTIN_REVW,
2372  RX_BUILTIN_RMPA,
2373  RX_BUILTIN_ROUND,
2374  RX_BUILTIN_SETPSW,
2375  RX_BUILTIN_WAIT,
2376  RX_BUILTIN_max
2377};
2378
2379static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2380
2381static void
2382rx_init_builtins (void)
2383{
2384#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
2385   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2386   add_builtin_function ("__builtin_rx_" LC_NAME,			\
2387			build_function_type_list (RET_TYPE##_type_node, \
2388						  NULL_TREE),		\
2389			RX_BUILTIN_##UC_NAME,				\
2390			BUILT_IN_MD, NULL, NULL_TREE)
2391
2392#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
2393   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2394   add_builtin_function ("__builtin_rx_" LC_NAME,			\
2395			build_function_type_list (RET_TYPE##_type_node, \
2396						  ARG_TYPE##_type_node, \
2397						  NULL_TREE),		\
2398			RX_BUILTIN_##UC_NAME,				\
2399			BUILT_IN_MD, NULL, NULL_TREE)
2400
2401#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2402  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2403  add_builtin_function ("__builtin_rx_" LC_NAME,			\
2404			build_function_type_list (RET_TYPE##_type_node, \
2405						  ARG_TYPE1##_type_node,\
2406						  ARG_TYPE2##_type_node,\
2407						  NULL_TREE),		\
2408			RX_BUILTIN_##UC_NAME,				\
2409			BUILT_IN_MD, NULL, NULL_TREE)
2410
2411#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2412  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2413  add_builtin_function ("__builtin_rx_" LC_NAME,			\
2414			build_function_type_list (RET_TYPE##_type_node, \
2415						  ARG_TYPE1##_type_node,\
2416						  ARG_TYPE2##_type_node,\
2417						  ARG_TYPE3##_type_node,\
2418						  NULL_TREE),		\
2419			RX_BUILTIN_##UC_NAME,				\
2420			BUILT_IN_MD, NULL, NULL_TREE)
2421
2422  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
2423  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
2424  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
2425  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
2426  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
2427  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
2428  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
2429  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
2430  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2431  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2432  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
2433  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
2434  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
2435  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
2436  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
2437  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
2438  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
2439  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
2440  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
2441  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
2442}
2443
2444/* Return the RX builtin for CODE.  */
2445
2446static tree
2447rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2448{
2449  if (code >= RX_BUILTIN_max)
2450    return error_mark_node;
2451
2452  return rx_builtins[code];
2453}
2454
2455static rtx
2456rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2457{
2458  if (reg && ! REG_P (arg))
2459    arg = force_reg (SImode, arg);
2460
2461  emit_insn (gen_func (arg));
2462
2463  return NULL_RTX;
2464}
2465
2466static rtx
2467rx_expand_builtin_mvtc (tree exp)
2468{
2469  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2470  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2471
2472  if (! CONST_INT_P (arg1))
2473    return NULL_RTX;
2474
2475  if (! REG_P (arg2))
2476    arg2 = force_reg (SImode, arg2);
2477
2478  if (INTVAL (arg1) == 1)
2479    {
2480      warning (0, "invalid control register %d for mvtc; using %<psw%>",
2481	       (int) INTVAL (arg1));
2482      arg1 = const0_rtx;
2483    }
2484
2485  emit_insn (gen_mvtc (arg1, arg2));
2486
2487  return NULL_RTX;
2488}
2489
2490static rtx
2491rx_expand_builtin_mvfc (tree t_arg, rtx target)
2492{
2493  rtx arg = expand_normal (t_arg);
2494
2495  if (! CONST_INT_P (arg))
2496    return NULL_RTX;
2497
2498  if (target == NULL_RTX)
2499    return NULL_RTX;
2500
2501  if (! REG_P (target))
2502    target = force_reg (SImode, target);
2503
2504  emit_insn (gen_mvfc (target, arg));
2505
2506  return target;
2507}
2508
2509static rtx
2510rx_expand_builtin_mvtipl (rtx arg)
2511{
2512  /* The RX610 does not support the MVTIPL instruction.  */
2513  if (rx_cpu_type == RX610)
2514    return NULL_RTX;
2515
2516  if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2517    return NULL_RTX;
2518
2519  emit_insn (gen_mvtipl (arg));
2520
2521  return NULL_RTX;
2522}
2523
2524static rtx
2525rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2526{
2527  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2528  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2529
2530  if (! REG_P (arg1))
2531    arg1 = force_reg (SImode, arg1);
2532
2533  if (! REG_P (arg2))
2534    arg2 = force_reg (SImode, arg2);
2535
2536  emit_insn (gen_func (arg1, arg2));
2537
2538  return NULL_RTX;
2539}
2540
2541static rtx
2542rx_expand_int_builtin_1_arg (rtx arg,
2543			     rtx target,
2544			     rtx (* gen_func)(rtx, rtx),
2545			     bool mem_ok)
2546{
2547  if (! REG_P (arg))
2548    if (!mem_ok || ! MEM_P (arg))
2549      arg = force_reg (SImode, arg);
2550
2551  if (target == NULL_RTX || ! REG_P (target))
2552    target = gen_reg_rtx (SImode);
2553
2554  emit_insn (gen_func (target, arg));
2555
2556  return target;
2557}
2558
2559static rtx
2560rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2561{
2562  if (target == NULL_RTX || ! REG_P (target))
2563    target = gen_reg_rtx (SImode);
2564
2565  emit_insn (gen_func (target));
2566
2567  return target;
2568}
2569
2570static rtx
2571rx_expand_builtin_round (rtx arg, rtx target)
2572{
2573  if ((! REG_P (arg) && ! MEM_P (arg))
2574      || GET_MODE (arg) != SFmode)
2575    arg = force_reg (SFmode, arg);
2576
2577  if (target == NULL_RTX || ! REG_P (target))
2578    target = gen_reg_rtx (SImode);
2579
2580  emit_insn (gen_lrintsf2 (target, arg));
2581
2582  return target;
2583}
2584
2585static int
2586valid_psw_flag (rtx op, const char *which)
2587{
2588  static int mvtc_inform_done = 0;
2589
2590  if (GET_CODE (op) == CONST_INT)
2591    switch (INTVAL (op))
2592      {
2593      case 0: case 'c': case 'C':
2594      case 1: case 'z': case 'Z':
2595      case 2: case 's': case 'S':
2596      case 3: case 'o': case 'O':
2597      case 8: case 'i': case 'I':
2598      case 9: case 'u': case 'U':
2599	return 1;
2600      }
2601
2602  error ("%<__builtin_rx_%s%> takes %<C%>, %<Z%>, %<S%>, %<O%>, %<I%>, "
2603	 "or %<U%>", which);
2604  if (!mvtc_inform_done)
2605    error ("use %<__builtin_rx_mvtc (0, ... )%> to write arbitrary values to PSW");
2606  mvtc_inform_done = 1;
2607
2608  return 0;
2609}
2610
2611static rtx
2612rx_expand_builtin (tree exp,
2613		   rtx target,
2614		   rtx subtarget ATTRIBUTE_UNUSED,
2615		   machine_mode mode ATTRIBUTE_UNUSED,
2616		   int ignore ATTRIBUTE_UNUSED)
2617{
2618  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2619  tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2620  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
2621  unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
2622
2623  switch (fcode)
2624    {
2625    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
2626    case RX_BUILTIN_CLRPSW:
2627      if (!valid_psw_flag (op, "clrpsw"))
2628	return NULL_RTX;
2629      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2630    case RX_BUILTIN_SETPSW:
2631      if (!valid_psw_flag (op, "setpsw"))
2632	return NULL_RTX;
2633      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2634    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
2635	(op, gen_int, false);
2636    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
2637    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
2638    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
2639    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
2640    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2641	(target, gen_mvfachi);
2642    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2643	(target, gen_mvfacmi);
2644    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2645	(op, gen_mvtachi, true);
2646    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2647	(op, gen_mvtaclo, true);
2648    case RX_BUILTIN_RMPA:
2649      if (rx_allow_string_insns)
2650	emit_insn (gen_rmpa ());
2651      else
2652	error ("%<-mno-allow-string-insns%> forbids the generation "
2653	       "of the RMPA instruction");
2654      return NULL_RTX;
2655    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
2656    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
2657    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
2658    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
2659	(op, gen_racw, false);
2660    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
2661    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
2662	(op, target, gen_revw, false);
2663    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;
2664
2665    default:
2666      internal_error ("bad builtin code");
2667      break;
2668    }
2669
2670  return NULL_RTX;
2671}
2672
2673/* Place an element into a constructor or destructor section.
2674   Like default_ctor_section_asm_out_constructor in varasm.cc
2675   except that it uses .init_array (or .fini_array) and it
2676   handles constructor priorities.  */
2677
2678static void
2679rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2680{
2681  section * s;
2682
2683  if (priority != DEFAULT_INIT_PRIORITY)
2684    {
2685      char buf[18];
2686
2687      sprintf (buf, "%s.%.5u",
2688	       is_ctor ? ".init_array" : ".fini_array",
2689	       priority);
2690      s = get_section (buf, SECTION_WRITE, NULL_TREE);
2691    }
2692  else if (is_ctor)
2693    s = ctors_section;
2694  else
2695    s = dtors_section;
2696
2697  switch_to_section (s);
2698  assemble_align (POINTER_SIZE);
2699  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2700}
2701
2702static void
2703rx_elf_asm_constructor (rtx symbol, int priority)
2704{
2705  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2706}
2707
2708static void
2709rx_elf_asm_destructor (rtx symbol, int priority)
2710{
2711  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2712}
2713
2714/* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2715
2716static tree
2717rx_handle_func_attribute (tree * node,
2718			  tree   name,
2719			  tree   args ATTRIBUTE_UNUSED,
2720			  int    flags ATTRIBUTE_UNUSED,
2721			  bool * no_add_attrs)
2722{
2723  gcc_assert (DECL_P (* node));
2724
2725  if (TREE_CODE (* node) != FUNCTION_DECL)
2726    {
2727      warning (OPT_Wattributes, "%qE attribute only applies to functions",
2728	       name);
2729      * no_add_attrs = true;
2730    }
2731
2732  /* FIXME: We ought to check for conflicting attributes.  */
2733
2734  /* FIXME: We ought to check that the interrupt and exception
2735     handler attributes have been applied to void functions.  */
2736  return NULL_TREE;
2737}
2738
2739/* Check "vector" attribute.  */
2740
2741static tree
2742rx_handle_vector_attribute (tree * node,
2743			    tree   name,
2744			    tree   args,
2745			    int    flags ATTRIBUTE_UNUSED,
2746			    bool * no_add_attrs)
2747{
2748  gcc_assert (DECL_P (* node));
2749  gcc_assert (args != NULL_TREE);
2750
2751  if (TREE_CODE (* node) != FUNCTION_DECL)
2752    {
2753      warning (OPT_Wattributes, "%qE attribute only applies to functions",
2754	       name);
2755      * no_add_attrs = true;
2756    }
2757
2758  return NULL_TREE;
2759}
2760
2761/* Table of RX specific attributes.  */
2762const struct attribute_spec rx_attribute_table[] =
2763{
2764  /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
2765     affects_type_identity, handler, exclude.  */
2766  { "fast_interrupt", 0, 0, true, false, false, false,
2767    rx_handle_func_attribute, NULL },
2768  { "interrupt",      0, -1, true, false, false, false,
2769    rx_handle_func_attribute, NULL },
2770  { "naked",          0, 0, true, false, false, false,
2771    rx_handle_func_attribute, NULL },
2772  { "vector",         1, -1, true, false, false, false,
2773    rx_handle_vector_attribute, NULL },
2774  { NULL,             0, 0, false, false, false, false, NULL, NULL }
2775};
2776
2777/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
2778
2779static void
2780rx_override_options_after_change (void)
2781{
2782  static bool first_time = TRUE;
2783
2784  if (first_time)
2785    {
2786      /* If this is the first time through and the user has not disabled
2787	 the use of RX FPU hardware then enable -ffinite-math-only,
2788	 since the FPU instructions do not support NaNs and infinities.  */
2789      if (TARGET_USE_FPU)
2790	flag_finite_math_only = 1;
2791
2792      first_time = FALSE;
2793    }
2794  else
2795    {
2796      /* Alert the user if they are changing the optimization options
2797	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2798      if (TARGET_USE_FPU
2799	  && !flag_finite_math_only)
2800	warning (0, "RX FPU instructions do not support NaNs and infinities");
2801    }
2802}
2803
2804static void
2805rx_option_override (void)
2806{
2807  unsigned int i;
2808  cl_deferred_option *opt;
2809  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2810
2811  if (v)
2812    FOR_EACH_VEC_ELT (*v, i, opt)
2813      {
2814	switch (opt->opt_index)
2815	  {
2816	  case OPT_mint_register_:
2817	    switch (opt->value)
2818	      {
2819	      case 4:
2820		fixed_regs[10] = call_used_regs [10] = 1;
2821		/* Fall through.  */
2822	      case 3:
2823		fixed_regs[11] = call_used_regs [11] = 1;
2824		/* Fall through.  */
2825	      case 2:
2826		fixed_regs[12] = call_used_regs [12] = 1;
2827		/* Fall through.  */
2828	      case 1:
2829		fixed_regs[13] = call_used_regs [13] = 1;
2830		/* Fall through.  */
2831	      case 0:
2832		rx_num_interrupt_regs = opt->value;
2833		break;
2834	      default:
2835		rx_num_interrupt_regs = 0;
2836		/* Error message already given because rx_handle_option
2837		  returned false.  */
2838		break;
2839	      }
2840	    break;
2841
2842	  default:
2843	    gcc_unreachable ();
2844	  }
2845      }
2846
2847  /* This target defaults to strict volatile bitfields.  */
2848  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2849    flag_strict_volatile_bitfields = 1;
2850
2851  rx_override_options_after_change ();
2852
2853  /* These values are bytes, not log.  */
2854  if (! optimize_size)
2855    {
2856      if (flag_align_jumps && !str_align_jumps)
2857	str_align_jumps = ((rx_cpu_type == RX100
2858			    || rx_cpu_type == RX200) ? "4" : "8");
2859      if (flag_align_loops && !str_align_loops)
2860	str_align_loops = ((rx_cpu_type == RX100
2861			    || rx_cpu_type == RX200) ? "4" : "8");
2862      if (flag_align_labels && !str_align_labels)
2863	str_align_labels = ((rx_cpu_type == RX100
2864			     || rx_cpu_type == RX200) ? "4" : "8");
2865    }
2866}
2867
2868
2869static bool
2870rx_allocate_stack_slots_for_args (void)
2871{
2872  /* Naked functions should not allocate stack slots for arguments.  */
2873  return ! is_naked_func (NULL_TREE);
2874}
2875
2876static bool
2877rx_func_attr_inlinable (const_tree decl)
2878{
2879  return ! is_fast_interrupt_func (decl)
2880    &&   ! is_interrupt_func (decl)
2881    &&   ! is_naked_func (decl);
2882}
2883
2884static bool
2885rx_warn_func_return (tree decl)
2886{
2887  /* Naked functions are implemented entirely in assembly, including the
2888     return sequence, so suppress warnings about this.  */
2889  return !is_naked_func (decl);
2890}
2891
2892/* Return nonzero if it is ok to make a tail-call to DECL,
2893   a function_decl or NULL if this is an indirect call, using EXP  */
2894
2895static bool
2896rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2897{
2898  if (TARGET_JSR)
2899    return false;
2900
2901  /* Do not allow indirect tailcalls.  The
2902     sibcall patterns do not support them.  */
2903  if (decl == NULL)
2904    return false;
2905
2906  /* Never tailcall from inside interrupt handlers or naked functions.  */
2907  if (is_fast_interrupt_func (NULL_TREE)
2908      || is_interrupt_func (NULL_TREE)
2909      || is_naked_func (NULL_TREE))
2910    return false;
2911
2912  return true;
2913}
2914
2915static void
2916rx_file_start (void)
2917{
2918  if (! TARGET_AS100_SYNTAX)
2919    default_file_start ();
2920}
2921
2922static bool
2923rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2924{
2925  /* The packed attribute overrides the MS behavior.  */
2926  return ! TYPE_PACKED (record_type);
2927}
2928
2929/* Returns true if X a legitimate constant for an immediate
2930   operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2931
2932bool
2933rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2934{
2935  switch (GET_CODE (x))
2936    {
2937    case CONST:
2938      x = XEXP (x, 0);
2939
2940      if (GET_CODE (x) == PLUS)
2941	{
2942	  if (! CONST_INT_P (XEXP (x, 1)))
2943	    return false;
2944
2945	  /* GCC would not pass us CONST_INT + CONST_INT so we
2946	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
2947	  x = XEXP (x, 0);
2948	  gcc_assert (! CONST_INT_P (x));
2949	}
2950
2951      switch (GET_CODE (x))
2952	{
2953	case LABEL_REF:
2954	case SYMBOL_REF:
2955	  return true;
2956
2957	case UNSPEC:
2958	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2959
2960	default:
2961	  /* FIXME: Can this ever happen ?  */
2962	  gcc_unreachable ();
2963	}
2964      break;
2965
2966    case LABEL_REF:
2967    case SYMBOL_REF:
2968      return true;
2969    case CONST_DOUBLE:
2970      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2971    case CONST_VECTOR:
2972      return false;
2973    default:
2974      gcc_assert (CONST_INT_P (x));
2975      break;
2976    }
2977
2978  return ok_for_max_constant (INTVAL (x));
2979}
2980
2981static int
2982rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2983		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2984{
2985  rtx a, b;
2986
2987  if (GET_CODE (addr) != PLUS)
2988    return COSTS_N_INSNS (1);
2989
2990  a = XEXP (addr, 0);
2991  b = XEXP (addr, 1);
2992
2993  if (REG_P (a) && REG_P (b))
2994    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
2995    return COSTS_N_INSNS (4);
2996
2997  if (speed)
2998    /* [REG+OFF] is just as fast as [REG].  */
2999    return COSTS_N_INSNS (1);
3000
3001  if (CONST_INT_P (b)
3002      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
3003    /* Try to discourage REG + <large OFF> when optimizing for size.  */
3004    return COSTS_N_INSNS (2);
3005
3006  return COSTS_N_INSNS (1);
3007}
3008
3009static bool
3010rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
3011	      int opno ATTRIBUTE_UNUSED, int* total, bool speed)
3012{
3013  if (x == const0_rtx)
3014    {
3015      *total = 0;
3016      return true;
3017    }
3018
3019  switch (GET_CODE (x))
3020    {
3021    case MULT:
3022      if (mode == DImode)
3023	{
3024	  *total = COSTS_N_INSNS (2);
3025	  return true;
3026	}
3027      /* fall through */
3028
3029    case PLUS:
3030    case MINUS:
3031    case AND:
3032    case COMPARE:
3033    case IOR:
3034    case XOR:
3035      *total = COSTS_N_INSNS (1);
3036      return true;
3037
3038    case DIV:
3039      if (speed)
3040	/* This is the worst case for a division.  Pessimize divisions when
3041	   not optimizing for size and allow reciprocal optimizations which
3042	   produce bigger code.  */
3043	*total = COSTS_N_INSNS (20);
3044      else
3045	*total = COSTS_N_INSNS (3);
3046      return true;
3047
3048    case UDIV:
3049      if (speed)
3050	/* This is the worst case for a division.  Pessimize divisions when
3051	   not optimizing for size and allow reciprocal optimizations which
3052	   produce bigger code.  */
3053	*total = COSTS_N_INSNS (18);
3054      else
3055	*total = COSTS_N_INSNS (3);
3056      return true;
3057
3058    default:
3059      break;
3060    }
3061
3062  return false;
3063}
3064
3065static bool
3066rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3067{
3068  /* We can always eliminate to the frame pointer.
3069     We can eliminate to the stack pointer unless a frame
3070     pointer is needed.  */
3071
3072  return to == FRAME_POINTER_REGNUM
3073    || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
3074}
3075
3076
3077static void
3078rx_trampoline_template (FILE * file)
3079{
3080  /* Output assembler code for a block containing the constant
3081     part of a trampoline, leaving space for the variable parts.
3082
3083     On the RX, (where r8 is the static chain regnum) the trampoline
3084     looks like:
3085
3086	   mov 		#<static chain value>, r8
3087	   mov          #<function's address>, r9
3088	   jmp		r9
3089
3090     In big-endian-data-mode however instructions are read into the CPU
3091     4 bytes at a time.  These bytes are then swapped around before being
3092     passed to the decoder.  So...we must partition our trampoline into
3093     4 byte packets and swap these packets around so that the instruction
3094     reader will reverse the process.  But, in order to avoid splitting
3095     the 32-bit constants across these packet boundaries, (making inserting
3096     them into the constructed trampoline very difficult) we have to pad the
3097     instruction sequence with NOP insns.  ie:
3098
3099           nop
3100	   nop
3101           mov.l	#<...>, r8
3102	   nop
3103	   nop
3104           mov.l	#<...>, r9
3105           jmp		r9
3106	   nop
3107	   nop             */
3108
3109  if (! TARGET_BIG_ENDIAN_DATA)
3110    {
3111      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3112      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3113      asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
3114    }
3115  else
3116    {
3117      char r8 = '0' + STATIC_CHAIN_REGNUM;
3118      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3119
3120      if (TARGET_AS100_SYNTAX)
3121        {
3122          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
3123          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
3124          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
3125          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
3126          asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
3127        }
3128      else
3129        {
3130          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
3131          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
3132          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
3133          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
3134          asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
3135        }
3136    }
3137}
3138
3139static void
3140rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3141{
3142  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3143
3144  emit_block_move (tramp, assemble_trampoline_template (),
3145		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3146
3147  if (TARGET_BIG_ENDIAN_DATA)
3148    {
3149      emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3150      emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3151    }
3152  else
3153    {
3154      emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3155      emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3156    }
3157}
3158
3159static int
3160rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3161		     reg_class_t regclass ATTRIBUTE_UNUSED,
3162		     bool in)
3163{
3164  return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3165}
3166
3167/* Convert a CC_MODE to the set of flags that it represents.  */
3168
3169static unsigned int
3170flags_from_mode (machine_mode mode)
3171{
3172  switch (mode)
3173    {
3174    case E_CC_ZSmode:
3175      return CC_FLAG_S | CC_FLAG_Z;
3176    case E_CC_ZSOmode:
3177      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3178    case E_CC_ZSCmode:
3179      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3180    case E_CCmode:
3181      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3182    case E_CC_Fmode:
3183      return CC_FLAG_FP;
3184    default:
3185      gcc_unreachable ();
3186    }
3187}
3188
3189/* Convert a set of flags to a CC_MODE that can implement it.  */
3190
3191static machine_mode
3192mode_from_flags (unsigned int f)
3193{
3194  if (f & CC_FLAG_FP)
3195    return CC_Fmode;
3196  if (f & CC_FLAG_O)
3197    {
3198      if (f & CC_FLAG_C)
3199	return CCmode;
3200      else
3201	return CC_ZSOmode;
3202    }
3203  else if (f & CC_FLAG_C)
3204    return CC_ZSCmode;
3205  else
3206    return CC_ZSmode;
3207}
3208
3209/* Convert an RTX_CODE to the set of flags needed to implement it.
3210   This assumes an integer comparison.  */
3211
3212static unsigned int
3213flags_from_code (enum rtx_code code)
3214{
3215  switch (code)
3216    {
3217    case LT:
3218    case GE:
3219      return CC_FLAG_S;
3220    case GT:
3221    case LE:
3222      return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3223    case GEU:
3224    case LTU:
3225      return CC_FLAG_C;
3226    case GTU:
3227    case LEU:
3228      return CC_FLAG_C | CC_FLAG_Z;
3229    case EQ:
3230    case NE:
3231      return CC_FLAG_Z;
3232    default:
3233      gcc_unreachable ();
3234    }
3235}
3236
3237/* Return a CC_MODE of which both M1 and M2 are subsets.  */
3238
3239static machine_mode
3240rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3241{
3242  unsigned f;
3243
3244  /* Early out for identical modes.  */
3245  if (m1 == m2)
3246    return m1;
3247
3248  /* There's no valid combination for FP vs non-FP.  */
3249  f = flags_from_mode (m1) | flags_from_mode (m2);
3250  if (f & CC_FLAG_FP)
3251    return VOIDmode;
3252
3253  /* Otherwise, see what mode can implement all the flags.  */
3254  return mode_from_flags (f);
3255}
3256
3257/* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
3258
3259machine_mode
3260rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3261{
3262  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3263    return CC_Fmode;
3264
3265  if (y != const0_rtx)
3266    return CCmode;
3267
3268  return mode_from_flags (flags_from_code (cmp_code));
3269}
3270
3271/* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
3272   CC_MODE, and use that in branches based on that compare.  */
3273
3274void
3275rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3276		  rtx c1, rtx c2, rtx label)
3277{
3278  rtx flags, x;
3279
3280  flags = gen_rtx_REG (cc_mode, CC_REG);
3281  x = gen_rtx_COMPARE (cc_mode, c1, c2);
3282  x = gen_rtx_SET (flags, x);
3283  emit_insn (x);
3284
3285  x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3286  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3287  x = gen_rtx_SET (pc_rtx, x);
3288  emit_jump_insn (x);
3289}
3290
3291/* A helper function for matching parallels that set the flags.  */
3292
3293bool
3294rx_match_ccmode (rtx insn, machine_mode cc_mode)
3295{
3296  rtx op1, flags;
3297  machine_mode flags_mode;
3298
3299  gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3300
3301  op1 = XVECEXP (PATTERN (insn), 0, 0);
3302  gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3303
3304  flags = SET_DEST (op1);
3305  flags_mode = GET_MODE (flags);
3306
3307  if (GET_MODE (SET_SRC (op1)) != flags_mode)
3308    return false;
3309  if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3310    return false;
3311
3312  /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
3313  if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3314    return false;
3315
3316  return true;
3317}
3318
3319
3320static int
3321rx_max_skip_for_label (rtx_insn *lab)
3322{
3323  int opsize;
3324  rtx_insn *op;
3325
3326  if (optimize_size)
3327    return 0;
3328
3329  if (lab == NULL)
3330    return 0;
3331
3332  op = lab;
3333  do
3334    {
3335      op = next_nonnote_nondebug_insn (op);
3336    }
3337  while (op && (LABEL_P (op)
3338		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3339  if (!op)
3340    return 0;
3341
3342  opsize = get_attr_length (op);
3343  if (opsize >= 0 && opsize < 8)
3344    return MAX (0, opsize - 1);
3345  return 0;
3346}
3347
3348static int
3349rx_align_log_for_label (rtx_insn *lab, int uses_threshold)
3350{
3351  /* This is a simple heuristic to guess when an alignment would not be useful
3352     because the delay due to the inserted NOPs would be greater than the delay
3353     due to the misaligned branch.  If uses_threshold is zero then the alignment
3354     is always useful.  */
3355  if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3356    return 0;
3357
3358  if (optimize_size)
3359    return 0;
3360
3361  /* Return zero if max_skip not a positive number.  */
3362  int max_skip = rx_max_skip_for_label (lab);
3363  if (max_skip <= 0)
3364    return 0;
3365
3366  /* These values are log, not bytes.  */
3367  if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3368    return 2; /* 4 bytes */
3369  return 3;   /* 8 bytes */
3370}
3371
3372align_flags
3373rx_align_for_label (rtx_insn *lab, int uses_threshold)
3374{
3375  return align_flags (rx_align_log_for_label (lab, uses_threshold),
3376		      rx_max_skip_for_label (lab));
3377}
3378
3379/* Compute the real length of the extending load-and-op instructions.  */
3380
3381int
3382rx_adjust_insn_length (rtx_insn *insn, int current_length)
3383{
3384  rtx extend, mem, offset;
3385  bool zero;
3386  int factor;
3387
3388  if (!INSN_P (insn))
3389    return current_length;
3390
3391  switch (INSN_CODE (insn))
3392    {
3393    default:
3394      return current_length;
3395
3396    case CODE_FOR_plussi3_zero_extendhi:
3397    case CODE_FOR_andsi3_zero_extendhi:
3398    case CODE_FOR_iorsi3_zero_extendhi:
3399    case CODE_FOR_xorsi3_zero_extendhi:
3400    case CODE_FOR_divsi3_zero_extendhi:
3401    case CODE_FOR_udivsi3_zero_extendhi:
3402    case CODE_FOR_minussi3_zero_extendhi:
3403    case CODE_FOR_smaxsi3_zero_extendhi:
3404    case CODE_FOR_sminsi3_zero_extendhi:
3405    case CODE_FOR_multsi3_zero_extendhi:
3406    case CODE_FOR_comparesi3_zero_extendhi:
3407      zero = true;
3408      factor = 2;
3409      break;
3410
3411    case CODE_FOR_plussi3_sign_extendhi:
3412    case CODE_FOR_andsi3_sign_extendhi:
3413    case CODE_FOR_iorsi3_sign_extendhi:
3414    case CODE_FOR_xorsi3_sign_extendhi:
3415    case CODE_FOR_divsi3_sign_extendhi:
3416    case CODE_FOR_udivsi3_sign_extendhi:
3417    case CODE_FOR_minussi3_sign_extendhi:
3418    case CODE_FOR_smaxsi3_sign_extendhi:
3419    case CODE_FOR_sminsi3_sign_extendhi:
3420    case CODE_FOR_multsi3_sign_extendhi:
3421    case CODE_FOR_comparesi3_sign_extendhi:
3422      zero = false;
3423      factor = 2;
3424      break;
3425
3426    case CODE_FOR_plussi3_zero_extendqi:
3427    case CODE_FOR_andsi3_zero_extendqi:
3428    case CODE_FOR_iorsi3_zero_extendqi:
3429    case CODE_FOR_xorsi3_zero_extendqi:
3430    case CODE_FOR_divsi3_zero_extendqi:
3431    case CODE_FOR_udivsi3_zero_extendqi:
3432    case CODE_FOR_minussi3_zero_extendqi:
3433    case CODE_FOR_smaxsi3_zero_extendqi:
3434    case CODE_FOR_sminsi3_zero_extendqi:
3435    case CODE_FOR_multsi3_zero_extendqi:
3436    case CODE_FOR_comparesi3_zero_extendqi:
3437      zero = true;
3438      factor = 1;
3439      break;
3440
3441    case CODE_FOR_plussi3_sign_extendqi:
3442    case CODE_FOR_andsi3_sign_extendqi:
3443    case CODE_FOR_iorsi3_sign_extendqi:
3444    case CODE_FOR_xorsi3_sign_extendqi:
3445    case CODE_FOR_divsi3_sign_extendqi:
3446    case CODE_FOR_udivsi3_sign_extendqi:
3447    case CODE_FOR_minussi3_sign_extendqi:
3448    case CODE_FOR_smaxsi3_sign_extendqi:
3449    case CODE_FOR_sminsi3_sign_extendqi:
3450    case CODE_FOR_multsi3_sign_extendqi:
3451    case CODE_FOR_comparesi3_sign_extendqi:
3452      zero = false;
3453      factor = 1;
3454      break;
3455    }
3456
3457  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
3458  extend = single_set (insn);
3459  gcc_assert (extend != NULL_RTX);
3460
3461  extend = SET_SRC (extend);
3462  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3463      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3464    extend = XEXP (extend, 0);
3465  else
3466    extend = XEXP (extend, 1);
3467
3468  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3469	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3470
3471  mem = XEXP (extend, 0);
3472  gcc_checking_assert (MEM_P (mem));
3473  if (REG_P (XEXP (mem, 0)))
3474    return (zero && factor == 1) ? 2 : 3;
3475
3476  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
3477  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3478  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3479
3480  offset = XEXP (XEXP (mem, 0), 1);
3481  gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3482
3483  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3484    return (zero && factor == 1) ? 3 : 4;
3485
3486  return (zero && factor == 1) ? 4 : 5;
3487}
3488
3489static bool
3490rx_narrow_volatile_bitfield (void)
3491{
3492  return true;
3493}
3494
3495static bool
3496rx_ok_to_inline (tree caller, tree callee)
3497{
3498  /* Do not inline functions with local variables
3499     into a naked CALLER - naked function have no stack frame and
3500     locals need a frame in order to have somewhere to live.
3501
3502     Unfortunately we have no way to determine the presence of
3503     local variables in CALLEE, so we have to be cautious and
3504     assume that there might be some there.
3505
3506     We do allow inlining when CALLEE has the "inline" type
3507     modifier or the "always_inline" or "gnu_inline" attributes.  */
3508  return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3509    || DECL_DECLARED_INLINE_P (callee)
3510    || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3511    || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3512}
3513
3514static bool
3515rx_enable_lra (void)
3516{
3517  return TARGET_ENABLE_LRA;
3518}
3519
3520rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3521{
3522  if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3523    {
3524      /* If we are inside an interrupt handler, assume that interrupts are
3525	 off -- which is the default hardware behavior.  In this case, there
3526	 is no need to disable the interrupts.  */
3527      m_prev_psw_reg = NULL;
3528    }
3529  else
3530    {
3531      m_prev_psw_reg = gen_reg_rtx (SImode);
3532      emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3533      emit_insn (gen_clrpsw (GEN_INT ('I')));
3534    }
3535}
3536
3537rx_atomic_sequence::~rx_atomic_sequence (void)
3538{
3539  if (m_prev_psw_reg != NULL)
3540    emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3541}
3542
3543/* Given an insn and a reg number, tell whether the reg dies or is unused
3544   after the insn.  */
3545bool
3546rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
3547{
3548  return find_regno_note (i, REG_DEAD, regno) != NULL
3549	 || find_regno_note (i, REG_UNUSED, regno) != NULL;
3550}
3551
3552/* Copy dead and unused notes from SRC to DST for the specified REGNO.  */
3553void
3554rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src, rtx_insn* dst)
3555{
3556  int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);
3557
3558  if (rtx note = find_regno_note (src, REG_DEAD, regno))
3559    add_shallow_copy_of_reg_note (dst, note);
3560
3561  if (rtx note = find_regno_note (src, REG_UNUSED, regno))
3562    add_shallow_copy_of_reg_note (dst, note);
3563}
3564
3565/* Try to fuse the current bit-operation insn with the surrounding memory load
3566   and store.  */
3567bool
3568rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
3569			 rtx (*gen_insn)(rtx, rtx))
3570{
3571  rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];
3572
3573  set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
3574					   prev_nonnote_nondebug_insn_bb);
3575  if (op2_def.set_src == NULL_RTX
3576      || !MEM_P (op2_def.set_src)
3577      || GET_MODE (op2_def.set_src) != QImode
3578      || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
3579					   GET_MODE (op2_def.set_src))
3580      || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
3581      || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg))
3582    )
3583    return false;
3584
3585  /* The register operand originates from a memory load and the memory load
3586     could be fused with the bitop insn.
3587     Look for the following memory store with the same memory operand.  */
3588  rtx mem = op2_def.set_src;
3589
3590  /* If the memory is an auto-mod address, it can't be fused.  */
3591  if (GET_CODE (XEXP (mem, 0)) == POST_INC
3592      || GET_CODE (XEXP (mem, 0)) == PRE_INC
3593      || GET_CODE (XEXP (mem, 0)) == POST_DEC
3594      || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
3595    return false;
3596
3597  rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
3598					  next_nonnote_nondebug_insn_bb);
3599  if (op0_use == NULL
3600      || !(GET_CODE (PATTERN (op0_use)) == SET
3601	   && RX_REG_P (XEXP (PATTERN (op0_use), 1))
3602	   && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
3603	   && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
3604      || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
3605      || reg_set_between_p (operands[2], curr_insn, op0_use))
3606    return false;
3607
3608  /* If the load-modify-store operation is fused it could potentially modify
3609     load/store ordering if there are other memory accesses between the load
3610     and the store for this insn.  If there are volatile mems between the load
3611     and store it's better not to change the ordering.  If there is a call
3612     between the load and store, it's also not safe to fuse it.  */
3613  for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
3614       i != NULL && i != op0_use;
3615       i = next_nonnote_nondebug_insn_bb (i))
3616    if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
3617      return false;
3618
3619  emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
3620  set_insn_deleted (op2_def.insn);
3621  set_insn_deleted (op0_use);
3622  return true;
3623}
3624
3625/* Implement TARGET_HARD_REGNO_NREGS.  */
3626
3627static unsigned int
3628rx_hard_regno_nregs (unsigned int, machine_mode mode)
3629{
3630  return CLASS_MAX_NREGS (0, mode);
3631}
3632
3633/* Implement TARGET_HARD_REGNO_MODE_OK.  */
3634
3635static bool
3636rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3637{
3638  return REGNO_REG_CLASS (regno) == GR_REGS;
3639}
3640
3641/* Implement TARGET_MODES_TIEABLE_P.  */
3642
3643static bool
3644rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3645{
3646  return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
3647	   || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
3648	  == (GET_MODE_CLASS (mode2) == MODE_FLOAT
3649	      || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
3650}
3651
3652#undef  TARGET_NARROW_VOLATILE_BITFIELD
3653#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield
3654
3655#undef  TARGET_CAN_INLINE_P
3656#define TARGET_CAN_INLINE_P			rx_ok_to_inline
3657
3658#undef  TARGET_FUNCTION_VALUE
3659#define TARGET_FUNCTION_VALUE		rx_function_value
3660
3661#undef  TARGET_RETURN_IN_MSB
3662#define TARGET_RETURN_IN_MSB		rx_return_in_msb
3663
3664#undef  TARGET_IN_SMALL_DATA_P
3665#define TARGET_IN_SMALL_DATA_P		rx_in_small_data
3666
3667#undef  TARGET_RETURN_IN_MEMORY
3668#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory
3669
3670#undef  TARGET_HAVE_SRODATA_SECTION
3671#define TARGET_HAVE_SRODATA_SECTION	true
3672
3673#undef	TARGET_ASM_SELECT_RTX_SECTION
3674#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section
3675
3676#undef	TARGET_ASM_SELECT_SECTION
3677#define	TARGET_ASM_SELECT_SECTION	rx_select_section
3678
3679#undef  TARGET_INIT_BUILTINS
3680#define TARGET_INIT_BUILTINS		rx_init_builtins
3681
3682#undef  TARGET_BUILTIN_DECL
3683#define TARGET_BUILTIN_DECL		rx_builtin_decl
3684
3685#undef  TARGET_EXPAND_BUILTIN
3686#define TARGET_EXPAND_BUILTIN		rx_expand_builtin
3687
3688#undef  TARGET_ASM_CONSTRUCTOR
3689#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor
3690
3691#undef  TARGET_ASM_DESTRUCTOR
3692#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor
3693
3694#undef  TARGET_STRUCT_VALUE_RTX
3695#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx
3696
3697#undef  TARGET_ATTRIBUTE_TABLE
3698#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table
3699
3700#undef  TARGET_ASM_FILE_START
3701#define TARGET_ASM_FILE_START			rx_file_start
3702
3703#undef  TARGET_MS_BITFIELD_LAYOUT_P
3704#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout
3705
3706#undef  TARGET_LEGITIMATE_ADDRESS_P
3707#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address
3708
3709#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
3710#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p
3711
3712#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3713#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args
3714
3715#undef  TARGET_ASM_FUNCTION_PROLOGUE
3716#define TARGET_ASM_FUNCTION_PROLOGUE 		rx_output_function_prologue
3717
3718#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3719#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P 	rx_func_attr_inlinable
3720
3721#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
3722#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall
3723
3724#undef  TARGET_FUNCTION_ARG
3725#define TARGET_FUNCTION_ARG     		rx_function_arg
3726
3727#undef  TARGET_FUNCTION_ARG_ADVANCE
3728#define TARGET_FUNCTION_ARG_ADVANCE     	rx_function_arg_advance
3729
3730#undef	TARGET_FUNCTION_ARG_BOUNDARY
3731#define	TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary
3732
3733#undef  TARGET_SET_CURRENT_FUNCTION
3734#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function
3735
3736#undef  TARGET_ASM_INTEGER
3737#define TARGET_ASM_INTEGER			rx_assemble_integer
3738
3739#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
3740#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true
3741
3742#undef  TARGET_MAX_ANCHOR_OFFSET
3743#define TARGET_MAX_ANCHOR_OFFSET		32
3744
3745#undef  TARGET_ADDRESS_COST
3746#define TARGET_ADDRESS_COST			rx_address_cost
3747
3748#undef  TARGET_CAN_ELIMINATE
3749#define TARGET_CAN_ELIMINATE			rx_can_eliminate
3750
3751#undef  TARGET_CONDITIONAL_REGISTER_USAGE
3752#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage
3753
3754#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
3755#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template
3756
3757#undef  TARGET_TRAMPOLINE_INIT
3758#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init
3759
3760#undef  TARGET_PRINT_OPERAND
3761#define TARGET_PRINT_OPERAND			rx_print_operand
3762
3763#undef  TARGET_PRINT_OPERAND_ADDRESS
3764#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address
3765
3766#undef  TARGET_CC_MODES_COMPATIBLE
3767#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible
3768
3769#undef  TARGET_MEMORY_MOVE_COST
3770#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost
3771
3772#undef  TARGET_OPTION_OVERRIDE
3773#define TARGET_OPTION_OVERRIDE			rx_option_override
3774
3775#undef  TARGET_PROMOTE_FUNCTION_MODE
3776#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode
3777
3778#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3779#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change
3780
3781#undef  TARGET_FLAGS_REGNUM
3782#define TARGET_FLAGS_REGNUM			CC_REG
3783
3784#undef  TARGET_LEGITIMATE_CONSTANT_P
3785#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant
3786
3787#undef  TARGET_LEGITIMIZE_ADDRESS
3788#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address
3789
3790#undef  TARGET_WARN_FUNC_RETURN
3791#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return
3792
3793#undef  TARGET_LRA_P
3794#define TARGET_LRA_P 				rx_enable_lra
3795
3796#undef  TARGET_HARD_REGNO_NREGS
3797#define TARGET_HARD_REGNO_NREGS			rx_hard_regno_nregs
3798#undef  TARGET_HARD_REGNO_MODE_OK
3799#define TARGET_HARD_REGNO_MODE_OK		rx_hard_regno_mode_ok
3800
3801#undef  TARGET_MODES_TIEABLE_P
3802#define TARGET_MODES_TIEABLE_P			rx_modes_tieable_p
3803
3804#undef  TARGET_RTX_COSTS
3805#define TARGET_RTX_COSTS rx_rtx_costs
3806
3807#undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
3808#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
3809
3810struct gcc_target targetm = TARGET_INITIALIZER;
3811
3812#include "gt-rx.h"
3813