/* Subroutines for insn-output.c for VAX.
   Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
   2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "function.h"
#include "output.h"
#include "insn-attr.h"
#include "recog.h"
#include "expr.h"
#include "optabs.h"
#include "flags.h"
#include "debug.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx);
static bool vax_rtx_costs (rtx, int, int, int *);
static rtx vax_struct_value_rtx (tree, int);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

struct gcc_target targetm = TARGET_INITIALIZER;

/* Set global variables as needed for the options enabled.  */

void
override_options (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;
}

/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

static void
vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
{
  int regno;
  int mask = 0;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno])
      mask |= 1 << regno;

  fprintf (file, "\t.word 0x%x\n", mask);

  if (dwarf2out_do_frame ())
    {
      const char *label = dwarf2out_cfi_label ();
      int offset = 0;

      for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
	if (regs_ever_live[regno] && !call_used_regs[regno])
	  dwarf2out_reg_save (label, regno, offset -= 4);

      dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
      dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
      dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
      dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
    }

  size -= STARTING_FRAME_OFFSET;
  if (size >= 64)
    asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
  else if (size)
    asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
}

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  When
   compiling for ELF, stay out of the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
  set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
}

/* Split the quadword (DImode) operands of INSN, an operation of rtx code
   CODE, into word-sized halves: the low words are stored in LOW and the
   high words replace the entries of OPERANDS.  N is the number of operands
   to split.  Autoincrement/autodecrement and dying register-indirect memory
   operands are given addressing modes that access the two halves correctly.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}

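/* Return the assembler name of the hard register backing REG, resolving
   pseudo registers through reg_renumber.  */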
static const char *
register_name (rtx reg)
{
  int regno;
  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    regno = reg_renumber[regno];
  gcc_assert (regno >= 0);
  return reg_names[regno];
}

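/* Output to FILE the VAX assembler syntax for the memory address ADDR,
   handling indirection, autoincrement/autodecrement, base and index
   registers, and constant offsets.  */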
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", register_name (addr));
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", register_name (XEXP (addr, 0)));
      break;

    case POST_INC:
      fprintf (file, "(%s)+", register_name (XEXP (addr, 0)));
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else
	{
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
		    }
		}
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
		    }
		}
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", register_name (breg));

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", register_name (ireg));
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}

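/* Output to FILE the VAX assembler syntax for operand X.  CODE is a letter
   from the operand-printing codes used in the machine description, e.g.
   'N' for the one's complement of a constant or 'R' for 32 minus a rotate
   count; plain registers, memory references and constants are handled at
   the end.  */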
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (REG_P (x))
    fprintf (file, "%s", register_name (x));
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}

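/* Return the VAX condition-name suffix for the reverse of comparison OP.  */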
const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

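/* Return true if the CONST_DOUBLE C is a floating point constant that the
   VAX can encode as a short literal: zero, one and two, the small integral
   powers of two checked below, and their exact reciprocals.  */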
static bool
vax_float_literal (rtx c)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE r, s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  REAL_VALUE_FROM_CONST_DOUBLE (r, c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      REAL_VALUE_FROM_INT (s, x, 0, mode);

      if (REAL_VALUES_EQUAL (r, s))
	return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (REAL_VALUES_EQUAL (r, s))
	return true;
    }
  return false;
}


/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

   1 - symbolic address
   1 - pre-decrement
   1 - indexing and/or offset(register)
   2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}

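/* Implement TARGET_ADDRESS_COST: one plus the cost computed above, except
   that a plain register indirect costs just 1.  */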
static int
vax_address_cost (rtx x)
{
  return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case SImode:
	case HImode:
	case QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && GET_MODE (x) != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}

/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
}

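/* Worker function for TARGET_STRUCT_VALUE_RTX: the address of an aggregate
   return value is passed in VAX_STRUCT_VALUE_REGNUM.  */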
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}

/* Worker function for NOTICE_UPDATE_CC.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}

/* Output integer move instructions.  */

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
		     enum machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;

  switch (mode)
    {
    case DImode:
      if (operands[1] == const0_rtx)
	return "clrq %0";
      if (TARGET_QMATH && optimize_size
	  && (CONST_INT_P (operands[1])
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  unsigned HOST_WIDE_INT hval, lval;
	  int n;

	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

	      /* Make sure only the low 32 bits are valid.  */
	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
	    }
	  else
	    {
	      lval = INTVAL (operands[1]);
	      hval = 0;
	    }

	  /* Here we check whether the 64-bit value is really a 6-bit value
	     shifted left by some amount.  If so, we can use ashq to shift it
	     into place, saving 7 bytes (1 addr-mode byte + 8 bytes
	     - 1 shift byte - 1 short literal byte).  */
	  if (lval != 0
	      && (n = exact_log2 (lval & (- lval))) != -1
	      && (lval >> n) < 64)
	    {
	      lval >>= n;

#if HOST_BITS_PER_WIDE_INT == 32
	      /* On 32-bit hosts, if the 6 bits didn't overflow into the
		 upper 32-bit value, that value had better be 0.  If they
		 did overflow, make sure it wasn't by too much.  */
	      if (hval != 0)
		{
		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
		    n = 0;	/* failure */
		  else
		    lval |= hval << (32 - n);
		}
#endif
	      /*  If n is 0, then ashq is not the best way to emit this.  */
	      if (n > 0)
		{
		  operands[1] = GEN_INT (lval);
		  operands[2] = GEN_INT (n);
		  return "ashq %2,%D1,%0";
		}
#if HOST_BITS_PER_WIDE_INT == 32
	    }
	  /* On 32-bit hosts, if the low 32-bit value is 0, check the
	     upper 32-bit value.  */
	  else if (hval != 0
		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
		   && (hval >> n) < 64)
	    {
	      operands[1] = GEN_INT (hval >> n);
	      operands[2] = GEN_INT (n + 32);
	      return "ashq %2,%D1,%0";
#endif
	    }
	}

      if (TARGET_QMATH
	  && (!MEM_P (operands[0])
	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
	  && ((CONST_INT_P (operands[1])
	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  hi[0] = operands[0];
	  hi[1] = operands[1];

	  split_quadword_operands (insn, SET, hi, lo, 2);

	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
	  pattern_hi = vax_output_int_move (NULL, hi, SImode);

	  /* If the patterns are just movl/movl or pushl/pushl then a movq
	     will be shorter (1 opcode byte + 1 addr-mode byte + 8 immediate
	     value bytes vs. 2 opcode bytes + 2 addr-mode bytes + 8 immediate
	     value bytes).  */
	  if ((!strncmp (pattern_lo, "movl", 4)
	      && !strncmp (pattern_hi, "movl", 4))
	      || (!strncmp (pattern_lo, "pushl", 5)
		  && !strncmp (pattern_hi, "pushl", 5)))
	    return "movq %1,%0";

	  if (MEM_P (operands[0])
	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
	    {
	      output_asm_insn (vax_output_int_move (NULL, hi, SImode), hi);
	      operands[0] = lo[0];
	      operands[1] = lo[1];
	      operands[2] = lo[2];
	      return pattern_lo;
	    }
	  else
	    {
	      output_asm_insn (vax_output_int_move (NULL, lo, SImode), lo);
	      operands[0] = hi[0];
	      operands[1] = hi[1];
	      operands[2] = hi[2];
	      return pattern_hi;
	    }
	}
      return "movq %1,%0";

    case SImode:
      if (symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1";
	  return "movab %a1,%0";
	}

      if (operands[1] == const0_rtx)
	{
	  if (push_operand (operands[1], SImode))
	    return "pushl %1";
	  return "clrl %0";
	}

      if (CONST_INT_P (operands[1])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  int n;
	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
	    return "mcoml %N1,%0";
	  if ((unsigned HOST_WIDE_INT)i < 0x100)
	    return "movzbl %1,%0";
	  if (i >= -0x80 && i < 0)
	    return "cvtbl %1,%0";
	  if (optimize_size
	      && (n = exact_log2 (i & (-i))) != -1
	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
	    {
	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
	      operands[2] = GEN_INT (n);
	      return "ashl %2,%1,%0";
	    }
	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
	    return "movzwl %1,%0";
	  if (i >= -0x8000 && i < 0)
	    return "cvtwl %1,%0";
	}
      if (push_operand (operands[0], SImode))
	return "pushl %1";
      return "movl %1,%0";

    case HImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrw %0";
	  else if ((unsigned HOST_WIDE_INT)i < 64)
	    return "movw %1,%0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomw %H1,%0";
	  else if ((unsigned HOST_WIDE_INT)i < 256)
	    return "movzbw %1,%0";
	  else if (i >= -0x80 && i < 0)
	    return "cvtbw %1,%0";
	}
      return "movw %1,%0";

    case QImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrb %0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomb %B1,%0";
	}
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

/* Output integer add instructions.

   The space-time-opcode tradeoffs for addition vary by model of VAX.

   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.

   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
   a register is used in an address too soon after it is set.
   Compromise by using movab only when it is shorter than the add
   or the base register in the address is one of sp, ap, or fp,
   which are not modified very often.  */

const char *
vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;
	bool sub;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, PLUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	    gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
	    gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
#endif

	    /* No reason to add a 0 to the low part and thus no carry, so just
	       emit the appropriate add/sub instruction.  */
	    if (low[2] == const0_rtx)
	      return vax_output_int_add (NULL, operands, SImode);

	    /* Are we doing addition or subtraction?  */
	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

	    /* We can't use vax_output_int_add since some of the patterns
	       don't modify the carry bit.  */
	    if (sub)
	      {
		if (low[2] == constm1_rtx)
		  pattern = "decl %0";
		else
		  pattern = "subl2 $%n2,%0";
	      }
	    else
	      {
		if (low[2] == const1_rtx)
		  pattern = "incl %0";
		else
		  pattern = "addl2 %2,%0";
	      }
	    output_asm_insn (pattern, low);

	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
	       two 32-bit parts, we complement each and then add one to the
	       low part.  We know that the low part can't overflow since
	       its value can never be 0.  */
	    if (sub)
		return "sbwc %N2,%0";
	    return "adwc %2,%0";
	  }

	/* Add low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	/* Should examine operand, punt if not POST_INC.  */
	      pattern = "tstl %0", carry = 0;
	    else if (low[2] == const1_rtx)
	      pattern = "incl %0";
	    else
	      pattern = "addl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == const0_rtx)
	      pattern = "movl %1,%0", carry = 0;
	    else
	      pattern = "addl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (!carry)
	  /* If CARRY is 0, we don't have any carry value to worry about.  */
	  return get_insn_template (CODE_FOR_addsi3, insn);
	/* %0 = C + %1 + %2 */
	if (!rtx_equal_p (operands[0], operands[1]))
	  output_asm_insn ((operands[1] == const0_rtx
			    ? "clrl %0"
			    : "movl %1,%0"), operands);
	return "adwc %2,%0";
      }

    case SImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incl %0";
	  if (operands[2] == constm1_rtx)
	    return "decl %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subl2 $%n2,%0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	      && REG_P (operands[1])
	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
		   || REGNO (operands[1]) > 11))
	    return "movab %c2(%1),%0";
	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
	    return "movab %a2[%0],%0";
	  return "addl2 %2,%0";
	}

      if (rtx_equal_p (operands[0], operands[2]))
	{
	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
	    return "movab %a1[%0],%0";
	  return "addl2 %1,%0";
	}

      if (CONST_INT_P (operands[2])
	  && INTVAL (operands[2]) < 32767
	  && INTVAL (operands[2]) > -32768
	  && REG_P (operands[1])
	  && push_operand (operands[0], SImode))
	return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	  && REG_P (operands[1])
	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
	       || REGNO (operands[1]) > 11))
	return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
	return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a2[%1]";
	  return "movab %a2[%1],%0";
	}

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1[%2]";
	  return "movab %a1[%2],%0";
	}

      if (flag_pic && REG_P (operands[0])
	  && symbolic_operand (operands[2], SImode))
	return "movab %a2,%0;addl2 %1,%0";

      if (flag_pic
	  && (symbolic_operand (operands[1], SImode)
	      || symbolic_operand (operands[1], SImode)))
	debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case HImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incw %0";
	  if (operands[2] == constm1_rtx)
	    return "decw %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subw2 $%n2,%0";
	  return "addw2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case QImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incb %0";
	  if (operands[2] == constm1_rtx)
	    return "decb %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subb2 $%n2,%0";
	  return "addb2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}

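/* Output integer subtract instructions.  Only DImode subtractions reach
   this function; they are split into word-sized subtract and
   subtract-with-carry operations.  */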
const char *
vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}

/* Output a conditional branch.  */
const char *
vax_output_conditional_branch (enum rtx_code code)
{
  switch (code)
    {
      case EQ:  return "jeql %l0";
      case NE:  return "jneq %l0";
      case GT:  return "jgtr %l0";
      case LT:  return "jlss %l0";
      case GTU: return "jgtru %l0";
      case LTU: return "jlssu %l0";
      case GE:  return "jgeq %l0";
      case LE:  return "jleq %l0";
      case GEU: return "jgequ %l0";
      case LEU: return "jlequ %l0";
      default:
	gcc_unreachable ();
    }
}

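/* Build a MEM of mode MODE referencing BASE plus byte offset OFF.  CODE
   selects the addressing form: POST_INC gives autoincrement, REG keeps a
   plain register indirect, and anything else yields a base-plus-offset
   address.  Used by vax_output_movmemsi below.  */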
static rtx
mkrtx (enum rtx_code code, enum machine_mode mode, rtx base, HOST_WIDE_INT off)
{
  rtx tmp;

  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);

  if (GET_CODE (base) == PLUS)
    {
      rtx a = XEXP (base, 0);
      rtx b = XEXP (base, 1);
      if (GET_CODE (b) == CONST)
	b = XEXP (b, 0);
      if (CONST_INT_P (b))
	{
	  off += INTVAL (b);
	  base = a;
	}
      else if (REG_P (a) && GET_CODE (b) == SYMBOL_REF)
	{
	  if (off != 0)
	    {
	      base = gen_rtx_PLUS (Pmode, a, plus_constant (b, off));
	      off = 0;
	    }
	}
      else if (REG_P (a) && GET_CODE (b) == PLUS)
	{
	  off += INTVAL (XEXP (b, 1));
	  base = gen_rtx_PLUS (Pmode, a, plus_constant (XEXP (b, 0), off));
	  off = 0;
	}
      else
	{
	  debug_rtx (base);
	  gcc_unreachable ();
	}
    }
  if (code == POST_INC)
    tmp = gen_rtx_POST_INC (SImode, base);
  else if (off == 0 || (REG_P (base) && code == REG))
    tmp = base;
  else
    tmp = plus_constant (base, off);
  return gen_rtx_MEM (mode, tmp);
}

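/* Output the assembler for a block move of a constant number of bytes
   (operand 2) as a sequence of movq/movl/movw/movb instructions.
   Autoincrement addressing is used when the source or destination address
   register dies in this insn.  */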
const char *
vax_output_movmemsi (rtx insn, rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[2]);
  HOST_WIDE_INT off;
  rtx src, dest;
  const char *pat = NULL;
  const enum rtx_code *src_codes;
  const enum rtx_code *dest_codes;
  int code_idx = 0;
  int mode_idx;

  static const enum machine_mode xmodes[4] =
    {
      QImode, HImode, SImode, DImode
    };
  static const char * const pats[4] =
    {
      "movb %1,%0", "movw %1,%0", "movl %1,%0", "movq %1,%0",
    };
  static const enum rtx_code codes[2][3] =
    {
      { PLUS, PLUS, PLUS },
      { POST_INC, POST_INC, REG },
    };

  src = XEXP (operands[1], 0);

  src_codes =
    codes[REG_P (src) && find_regno_note (insn, REG_DEAD, REGNO (src))];

  dest = XEXP (operands[0], 0);

  dest_codes =
    codes[REG_P (dest) && find_regno_note (insn, REG_DEAD, REGNO (dest))];

  for (off = 0, code_idx = 0, mode_idx = 3; mode_idx >= 0; mode_idx--)
    {
      const enum machine_mode mode = xmodes[mode_idx];
      const HOST_WIDE_INT mode_len = GET_MODE_SIZE (mode);
      for (; n >= mode_len; n -= mode_len, off += mode_len)
	{
	  if (pat != NULL)
	    output_asm_insn (pat, operands);
	  if (n == mode_len)
	    code_idx = 2;
	  operands[0] = mkrtx (dest_codes[code_idx], mode, dest, off);
	  operands[1] = mkrtx (src_codes[code_idx], mode, src, off);
	  if (pat == NULL)
	    code_idx = 1;
	  pat = pats[mode_idx];
	}
    }

  return pat;
}

/* 1 if X is an rtx for a constant that is a valid address.  */

bool
legitimate_constant_address_p (rtx x)
{
  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
    return true;
  if (GET_CODE (x) != CONST)
    return false;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (flag_pic
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
    return false;
#endif
   return true;
}

/* Nonzero if the constant value X is a legitimate general operand.
   It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */

bool
legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
{
  return true;
}

/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* Nonzero if X is an address which can be indirected.  External symbols
   could be in a sharable image library, so we disallow those.  */

static bool
indirectable_address_p (rtx x, bool strict, bool indirect)
{
  if (indirectable_constant_address_p (x, indirect)
      || BASE_REGISTER_P (x, strict))
    return true;
  if (GET_CODE (x) != PLUS
      || !BASE_REGISTER_P (XEXP (x, 0), strict)
      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
    return false;
  return indirectable_constant_address_p (XEXP (x, 1), indirect);
}

/* Return 1 if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      extern rtx *reg_equiv_mem;
      if (! reload_in_progress
	  || reg_equiv_mem[REGNO (x)] == 0
	  || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}

/* 1 if PROD is either a reg times the size of mode MODE and MODE is less
   than or equal to 8 bytes, or just a reg if MODE is one byte.  */

static bool
index_term_p (rtx prod, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_MODE_SIZE (mode) == 1)
    return BASE_REGISTER_P (prod, strict);

  if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
    return false;

  xfoo0 = XEXP (prod, 0);
  xfoo1 = XEXP (prod, 1);

  if (CONST_INT_P (xfoo0)
      && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo1, strict))
    return true;

  if (CONST_INT_P (xfoo1)
      && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
      && INDEX_REGISTER_P (xfoo0, strict))
    return true;

  return false;
}

/* Return 1 if X is the sum of a register
   and a valid index term for mode MODE.  */
static bool
reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
    return true;

  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
static bool
indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
{
  if (!CONSTANT_ADDRESS_P (xfoo0))
    return false;
  if (BASE_REGISTER_P (xfoo1, strict))
    return !flag_pic || mode == QImode;
  if (flag_pic && symbolic_operand (xfoo0, SImode))
    return false;
  return reg_plus_index_p (xfoo1, mode, strict);
}

/* legitimate_address_p returns 1 if it recognizes an RTL expression "x"
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.  */
bool
legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  rtx xfoo0, xfoo1;

  if (nonindexed_address_p (x, strict))
    return true;

  if (GET_CODE (x) != PLUS)
    return false;

  /* Handle <address>[index] represented with index-sum outermost */

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (index_term_p (xfoo0, mode, strict)
      && nonindexed_address_p (xfoo1, strict))
    return true;

  if (index_term_p (xfoo1, mode, strict)
      && nonindexed_address_p (xfoo0, strict))
    return true;

  /* Handle offset(reg)[index] with offset added outermost */

  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
      || indexable_address_p (xfoo1, xfoo0, mode, strict))
    return true;

  return false;
}

/* Return 1 if x (a legitimate address expression) has an effect that
   depends on the machine mode it is used for.  On the VAX, predecrement
   and postincrement addresses depend thus (the amount of decrement or
   increment being the length of the operand) and all indexed addresses
   depend thus (because the index scale factor is the length of the
   operand).  */

bool
vax_mode_dependent_address_p (rtx x)
{
  rtx xfoo0, xfoo1;

  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
    return true;
  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
    return false;

  return true;
}

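/* If X is a memory operand whose address may not be used directly in a
   DImode add/subtract (see illegal_addsub_di_memory_operand), load the
   address into a temporary register and return a MEM through it.  */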
static rtx
fixup_mathdi_operand (rtx x, enum machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}

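/* Expand a DImode addition or subtraction (CODE is PLUS or MINUS) of
   OPERANDS[1] and OPERANDS[2] into OPERANDS[0].  With TARGET_QMATH this
   uses the word-sized add/subtract-with-carry sequences; otherwise it
   falls back to the old quadword patterns.  */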
void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus operands are commutative) and if there is one
     addend that duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      if (!rtx_equal_p (operands[0], operands[1])
	  && (REG_P (operands[0]) && MEM_P (operands[1])))
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = operands[0];
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
			  operand_subword (operands[1], 0, 0, DImode));

      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
				 operand_subword (operands[1], 1, 0, DImode),
				 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If we are adding the same value to itself, that's really a multiply
	 by 2, which is just a left shift by 1.  */
      if (rtx_equal_p (operands[1], operands[2]))
	{
	  gcc_assert (code != MINUS);
	  emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
	  return;
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx,
	 because fixup_mathdi_operand will return an equivalent rtx but not
	 an equal one.  */

      if (rtx_equal_p (operands[0], operands[1]))
	operands[1] = operands[0];
      else
	operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
	operands[2] = operands[0];
      else
	operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are subtracting not from ourselves [d = a - b], and because the
	 carry ops are two operand only, we would need to do a move prior to
	 the subtract.  And if d == b, we would need a temp otherwise
	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
	 no temp is needed.

	 If we are doing addition, since the carry ops are two operand, if
	 we aren't adding to ourselves, move the first addend to the
	 destination first.  */

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
	{
	  if (code == MINUS && CONSTANT_P (operands[1]))
	    {
	      temp = gen_reg_rtx (DImode);
	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
	      code = PLUS;
	      gen_insn = gen_adcdi3;
	      operands[2] = operands[1];
	      operands[1] = operands[0];
	    }
	  else
	    emit_move_insn (operands[0], operands[1]);
	}

      /* Subtracting a constant will have been rewritten to an addition of the
	 negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}
