1/* Subroutines for insn-output.cc for VAX.
2   Copyright (C) 1987-2022 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#define IN_TARGET_CODE 1
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "target.h"
27#include "rtl.h"
28#include "tree.h"
29#include "stringpool.h"
30#include "attribs.h"
31#include "df.h"
32#include "memmodel.h"
33#include "tm_p.h"
34#include "optabs.h"
35#include "regs.h"
36#include "emit-rtl.h"
37#include "calls.h"
38#include "varasm.h"
39#include "conditions.h"
40#include "output.h"
41#include "expr.h"
42#include "reload.h"
43#include "builtins.h"
44
45/* This file should be included last.  */
46#include "target-def.h"
47
/* Forward declarations for the static target hook implementations
   registered with the targetm initializer below.  */
static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static machine_mode vax_cc_modes_compatible (machine_mode, machine_mode);
static rtx_insn *vax_md_asm_adjust (vec<rtx> &, vec<rtx> &,
				    vec<machine_mode> &, vec<const char *> &,
				    vec<rtx> &, HARD_REG_SET &, location_t);
static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
static void vax_function_arg_advance (cumulative_args_t,
				      const function_arg_info &);
static rtx vax_struct_value_rtx (tree, int);
static bool vax_lra_p (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
static HOST_WIDE_INT vax_starting_frame_offset (void);
71
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides the default for one target hook; the table is consumed by
   TARGET_INITIALIZER at the bottom of this section.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Enable compare elimination pass.  */
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM VAX_PSL_REGNUM

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

/* Return the narrowest CC mode that spans both modes offered.  */
#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE vax_cc_modes_compatible

/* Mark PSL as clobbered for compatibility with the CC0 representation.  */
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST vax_md_asm_adjust

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_LRA_P
#define TARGET_LRA_P vax_lra_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

/* The VAX procedure call convention always needs a frame pointer.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
145
/* Implement TARGET_OPTION_OVERRIDE.  Set global variables as needed for
   the options enabled.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

  /* XXX For NetBSD, disable gas(1) directives for CFI tables.
     Otherwise, wired relocations occur for readonly section
     `.eh_frame', by which libc.so cannot link.  */
  flag_dwarf2_cfi_asm = 0;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  /* Let the subtarget have the last word on option handling.  */
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
164
165static void
166vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
167{
168  rtx x;
169
170  x = plus_constant (Pmode, frame_pointer_rtx, offset);
171  x = gen_rtx_MEM (SImode, x);
172  x = gen_rtx_SET (x, src);
173  add_reg_note (insn, REG_CFA_OFFSET, x);
174}
175
/* Generate RTL for the function prologue.

   The VAX CALLS/CALLG instruction itself saves the registers named by
   the procedure entry mask, so all this function emits is that mask,
   the REG_CFA_* notes describing the resulting frame layout for the
   unwinder, and the adjustment of SP for the local frame.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;		/* Procedure entry mask: bit N set => save rN.  */
  HOST_WIDE_INT size;
  rtx insn;

  /* Registers that are live and not call-clobbered must be saved.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  /* Fixed slots: old AP, old FP and the return address.  */
  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  /* One slot per register saved by the entry mask, lowest regno first.  */
  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
247
248/* When debugging with stabs, we want to output an extra dummy label
249   so that gas can distinguish between D_float and G_float prior to
250   processing the .stabs directive identifying type double.  */
251static void
252vax_file_start (void)
253{
254  default_file_start ();
255
256  if (write_symbols == DBX_DEBUG)
257    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
258}
259
260/* We can use the BSD C library routines for the libgcc calls that are
261   still generated, since that's what they boil down to anyways.  When
262   ELF, avoid the user's namespace.  */
263
264static void
265vax_init_libfuncs (void)
266{
267  if (TARGET_BSD_DIVMOD)
268    {
269      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
270      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
271    }
272}
273
/* Split the DImode operands of a quadword operation into SImode word
   halves, handling autoincrement/autodecrement addresses specially.  */
275
/* Split each of the N DImode OPERANDS of INSN (whose operation is CODE)
   into two SImode word references: on return LOW[i] refers to the low
   word and OPERANDS[i] has been rewritten to refer to the high word.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Both halves share the same side-effect address; presumably
	     the output templates emit it once per word so the
	     increment/decrement addresses consecutive words -- verify
	     against the insn patterns before changing.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* When optimizing for size and the address register dies in
	     this insn, clobber it with a post-increment to reach the
	     low word, leaving it pointing at the high word.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* General case: take the two subwords of the DImode operand.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
312
/* Output to FILE the VAX assembler syntax for the memory address ADDR,
   decomposing a PLUS address into its displacement (OFFSET), base
   register (BREG) and index register (IREG) components.  */

void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      /* Indirection is spelled with a leading `*' in VAX syntax.  */
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT/ASHIFT of a REG and an
	 appropriate constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT/ASHIFT or REG in IREG, and the REG in BREG.  If we
	 have a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      /* First, peel one operand off the PLUS and classify it.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT
	       || GET_CODE (XEXP (addr, 1)) == ASHIFT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT
	       || GET_CODE (XEXP (addr, 0)) == ASHIFT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      /* Now classify what remains: a single term or a nested PLUS
	 holding the other two terms.  */
      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT || GET_CODE (addr) == ASHIFT)
	ireg = addr;
      else
	{
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  /* NOTE(review): the combined offset computed here is
		     immediately discarded by the unconditional
		     assignment below; it appears only the gcc_assert
		     checks matter.  Confirm intent before changing.  */
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT
			  || GET_CODE (XEXP (addr, 0)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  /* NOTE(review): same dead-store pattern as above --
		     the value computed here is overwritten below.  */
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT
			  || GET_CODE (XEXP (addr, 1)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (VOIDmode, offset);
	}

      /* Emit the base register as `(reg)' and the index as `[reg]'.  */
      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT || GET_CODE (ireg) == ASHIFT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}
530
/* Output to FILE the VAX assembler syntax for operand X, modified by
   the operand CODE letter:

   '#'	the double-precision float letter (d or g)
   '|'	the register prefix
   'k'	the condition mnemonic for comparison X
   'K'	the reversed condition mnemonic for comparison X
   'D'	a negative CONST_INT printed in hexadecimal
   'P'	CONST_INT plus one
   'N'	bitwise complement of CONST_INT
   'R'	32 minus CONST_INT (rotl cannot take negative counts)
   'H'	low 16 bits of the complement of CONST_INT
   'h'	negation of CONST_INT truncated to 16 bits
   'B'	low 8 bits of the complement of CONST_INT
   'b'	low 8 bits of the negation of CONST_INT
   'M'	mask of CONST_INT low-order one bits, complemented
   'x'	CONST_INT printed in hexadecimal

   With no matching code, registers, memory, float constants and other
   immediates are printed in their default syntax.  Note the chain falls
   through: e.g. a 'D' code with a non-negative CONST_INT ends up in the
   final immediate case.  */

void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'k')
    fputs (cond_name (x), file);
  else if (code == 'K')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      /* Anything else is an immediate; symbols are not representable as
	 immediates under PIC.  */
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
592
593const char *
594cond_name (rtx op)
595{
596  switch (GET_CODE (op))
597    {
598    case NE:
599      return "neq";
600    case EQ:
601      return "eql";
602    case GE:
603      return "geq";
604    case GT:
605      return "gtr";
606    case LE:
607      return "leq";
608    case LT:
609      return "lss";
610    case GEU:
611      return "gequ";
612    case GTU:
613      return "gtru";
614    case LEU:
615      return "lequ";
616    case LTU:
617      return "lssu";
618
619    default:
620      gcc_unreachable ();
621    }
622}
623
624const char *
625rev_cond_name (rtx op)
626{
627  switch (GET_CODE (op))
628    {
629    case EQ:
630      return "neq";
631    case NE:
632      return "eql";
633    case LT:
634      return "geq";
635    case LE:
636      return "gtr";
637    case GT:
638      return "leq";
639    case GE:
640      return "lss";
641    case LTU:
642      return "gequ";
643    case LEU:
644      return "gtru";
645    case GTU:
646      return "lequ";
647    case GEU:
648      return "lssu";
649
650    default:
651      gcc_unreachable ();
652    }
653}
654
655static bool
656vax_float_literal (rtx c)
657{
658  machine_mode mode;
659  const REAL_VALUE_TYPE *r;
660  REAL_VALUE_TYPE s;
661  int i;
662
663  if (GET_CODE (c) != CONST_DOUBLE)
664    return false;
665
666  mode = GET_MODE (c);
667
668  if (c == const_tiny_rtx[(int) mode][0]
669      || c == const_tiny_rtx[(int) mode][1]
670      || c == const_tiny_rtx[(int) mode][2])
671    return true;
672
673  r = CONST_DOUBLE_REAL_VALUE (c);
674
675  for (i = 0; i < 7; i++)
676    {
677      int x = 1 << i;
678      bool ok;
679      real_from_integer (&s, mode, x, SIGNED);
680
681      if (real_equal (r, &s))
682	return true;
683      ok = exact_real_inverse (mode, &s);
684      gcc_assert (ok);
685      if (real_equal (r, &s))
686	return true;
687    }
688  return false;
689}
690
691
692/* Return the cost in cycles of a memory address, relative to register
693   indirect.
694
695   Each of the following adds the indicated number of cycles:
696
697   1 - symbolic address
698   1 - pre-decrement
699   1 - indexing and/or offset(register)
700   2 - indirect */
701
702
/* Worker for vax_address_cost and vax_rtx_costs: return the extra cost
   in cycles of the address ADDR relative to register indirect, per the
   table in the comment above.  Walks the address by repeatedly
   restarting on sub-expressions, accumulating at most the three terms
   a VAX address can contain.  */

static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
    case ASHIFT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash one arm of the PLUS for later and recurse on the other;
	 a second PLUS fills the second stash slot.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
772
773static int
774vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
775		  addr_space_t as ATTRIBUTE_UNUSED,
776		  bool speed ATTRIBUTE_UNUSED)
777{
778  return COSTS_N_INSNS (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
779}
780
781/* Cost of an expression on a VAX.  This version has costs tuned for the
782   CVAX chip (found in the VAX 3 series) with comments for variations on
783   other models.
784
785   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
786   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
787   costs on a per cpu basis.  */
788
/* Implement TARGET_RTX_COSTS.  Set *TOTAL to the cost of expression X,
   whose outermost code appears inside OUTER_CODE, and return true when
   the subexpressions of X have already been accounted for (i.e. the
   caller must not recurse further).  MODE is the mode of X.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      /* Zero is cheapest of all.  */
      if (INTVAL (x) == 0)
	{
	  *total = COSTS_N_INSNS (1) / 2;
	  return true;
	}
      if (outer_code == AND)
	{
	  /* AND complements its constant operand; the complement must
	     fit a short literal to be cheap.  */
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077
		    ? COSTS_N_INSNS (1) : COSTS_N_INSNS (2));
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (3);
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? COSTS_N_INSNS (5) : COSTS_N_INSNS (8);
      else
	/* Integer CONST_DOUBLE: cheap when the value (or its negation,
	   for PLUS) fits a short literal.  */
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)
		  ? COSTS_N_INSNS (2) : COSTS_N_INSNS (5));
      return true;

    case POST_INC:
      *total = COSTS_N_INSNS (2);
      return true;			/* Implies register operand.  */

    case PRE_DEC:
      *total = COSTS_N_INSNS (3);
      return true;			/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case E_DFmode:
	  *total = COSTS_N_INSNS (16);	/* 4 on VAX 9000 */
	  break;
	case E_SFmode:
	  *total = COSTS_N_INSNS (9);	/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case E_DImode:
	  *total = COSTS_N_INSNS (16);	/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case E_SImode:
	case E_HImode:
	case E_QImode:
	  *total = COSTS_N_INSNS (10);	/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;		/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;		/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (17);
      break;

    case DIV:
      if (mode == DImode)
	*total = COSTS_N_INSNS (30);	/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = COSTS_N_INSNS (24);
      else
	*total = COSTS_N_INSNS (11);	/* 25 on VAX 2 */
      break;

    case MOD:
      *total = COSTS_N_INSNS (23);
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;		/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (29);
      break;

    case FLOAT:
      *total = COSTS_N_INSNS (6		/* 4 on VAX 9000 */
			      + (mode == DFmode)
			      + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = COSTS_N_INSNS (7);	/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = COSTS_N_INSNS (12);
      else
	*total = COSTS_N_INSNS (10);	/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = COSTS_N_INSNS (6);	/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode		/* 6/8 on VAX 9000, 16/15 on VAX 2 */
		? COSTS_N_INSNS (13) : COSTS_N_INSNS (8));
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = COSTS_N_INSNS (3);
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = COSTS_N_INSNS (4);
	  /* Skip the constant operand in the generic loop below.  */
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = COSTS_N_INSNS (9);
      else if (mode == SFmode)
	*total = COSTS_N_INSNS (6);
      else if (mode == DImode)
	*total = COSTS_N_INSNS (4);
      else
	*total = COSTS_N_INSNS (2);
      break;

    case NOT:
      *total = COSTS_N_INSNS (2);
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = COSTS_N_INSNS (15);
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = COSTS_N_INSNS (5);	/* 7 on VAX 2 */
      else
	*total = COSTS_N_INSNS (3);	/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += COSTS_N_INSNS (vax_address_cost_1 (x));
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = COSTS_N_INSNS (3);	/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += COSTS_N_INSNS (1);	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += COSTS_N_INSNS (1);
	      else
		*total += (GET_MODE (x) == DFmode
			   ? COSTS_N_INSNS (3) : COSTS_N_INSNS (2));
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += COSTS_N_INSNS (2);
	    }
	  break;
	case MEM:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += COSTS_N_INSNS (vax_address_cost_1 (XEXP (op, 0)));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += COSTS_N_INSNS (1);
	  break;
	}
    }
  return true;
}
1066
/* With ELF we do not support GOT entries for external `symbol+offset'
   references, so do not accept external symbol references if an offset
   is to be added.  Do not accept external symbol references at all if
   LOCAL_P is set.  This is for cases where making a reference indirect
   would make it invalid.  Do not accept any kind of symbol if SYMBOL_P
   is clear.  This is for situations where a reference is used as an
   immediate value for operations other than address loads (MOVA/PUSHA),
   as those operations do not support PC-relative immediates.  */
1075
bool
vax_acceptable_pic_operand_p (rtx x ATTRIBUTE_UNUSED,
			      bool local_p ATTRIBUTE_UNUSED,
			      bool symbol_p ATTRIBUTE_UNUSED)
{
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  /* Strip a `symbol+offset' CONST wrapper; per the rules above, once an
     offset is involved the symbol must resolve locally, so force LOCAL_P.  */
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    {
      x = XEXP (XEXP (x, 0), 0);
      local_p = true;
    }
  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
      /* Reject any symbol if SYMBOL_P is clear, and non-local symbols
	 when a local reference is required.  */
      return symbol_p && !(local_p && !SYMBOL_REF_LOCAL_P (x));
    case LABEL_REF:
      return symbol_p && !(local_p && LABEL_REF_NONLOCAL_P (x));
    default:
      break;
    }
#endif
  /* Anything else -- or any operand at all when external indirect
     addressing is available -- is acceptable.  */
  return true;
}
1099
1100/* Given a comparison code (NE, EQ, etc.) and the operands of a COMPARE,
1101   return the mode to be used for the comparison.  As we have the same
1102   interpretation of condition codes across all the instructions we just
1103   return the narrowest mode suitable for the comparison code requested.  */
1104
1105extern machine_mode
1106vax_select_cc_mode (enum rtx_code op,
1107		    rtx x ATTRIBUTE_UNUSED, rtx y ATTRIBUTE_UNUSED)
1108{
1109  switch (op)
1110    {
1111    default:
1112      gcc_unreachable ();
1113    case NE:
1114    case EQ:
1115      return CCZmode;
1116    case GE:
1117    case LT:
1118      return CCNmode;
1119    case GT:
1120    case LE:
1121      return CCNZmode;
1122    case GEU:
1123    case GTU:
1124    case LEU:
1125    case LTU:
1126      return CCmode;
1127    }
1128}
1129
1130/* Return the narrowest CC mode that spans both modes offered.  If they
1131   intersect, this will be the wider of the two, and if they do not then
1132   find one that is a superset of both (i.e. CCNZmode for a pair
1133   consisting of CCNmode and CCZmode).  A wider CC writer will satisfy
1134   a narrower CC reader, e.g. a comparison operator that uses CCZmode
1135   can use a CCNZmode output of a previous instruction.  */
1136
static machine_mode
vax_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  /* The lattice is CCmode > CCNZmode > { CCNmode, CCZmode }, where the
     last two are incomparable siblings whose least upper bound is
     CCNZmode.  Only these four CC modes are expected here.  */
  switch (m1)
    {
    default:
      gcc_unreachable ();
    case E_CCmode:
      /* CCmode is the widest mode; it spans all of the others.  */
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	case E_CCNmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNZmode:
      /* CCNZmode spans everything except full CCmode.  */
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	  return m2;
	case E_CCNmode:
	case E_CCNZmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNmode:
    case E_CCZmode:
      /* CCNmode and CCZmode each span only themselves; their common
	 superset is CCNZmode.  */
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	  return m2;
	case E_CCNmode:
	case E_CCZmode:
	  return m1 == m2 ? m1 : E_CCNZmode;
	}
    }
}
1182
1183/* Mark PSL as clobbered for compatibility with the CC0 representation.  */
1184
static rtx_insn *
vax_md_asm_adjust (vec<rtx> &outputs ATTRIBUTE_UNUSED,
		   vec<rtx> &inputs ATTRIBUTE_UNUSED,
		   vec<machine_mode> &input_modes ATTRIBUTE_UNUSED,
		   vec<const char *> &constraints ATTRIBUTE_UNUSED,
		   vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs,
		   location_t /*loc*/)
{
  /* Treat every asm statement as clobbering the processor status
     longword, matching what the old CC0 representation assumed.  */
  clobbers.safe_push (gen_rtx_REG (CCmode, VAX_PSL_REGNUM));
  SET_HARD_REG_BIT (clobbered_regs, VAX_PSL_REGNUM);
  /* No extra insns need to be emitted after the asm.  */
  return NULL;
}
1197
1198/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1199   Used for C++ multiple inheritance.
1200	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
1201	addl2	$DELTA, 4(ap)	#adjust first argument
1202	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
1203*/
1204
1205static void
1206vax_output_mi_thunk (FILE * file,
1207		     tree thunk ATTRIBUTE_UNUSED,
1208		     HOST_WIDE_INT delta,
1209		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1210		     tree function)
1211{
1212  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));
1213
1214  assemble_start_function (thunk, fnname);
1215  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1216  asm_fprintf (file, ",4(%Rap)\n");
1217  fprintf (file, "\tjmp ");
1218  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
1219  fprintf (file, "+2\n");
1220  assemble_end_function (thunk, fnname);
1221}
1222
/* Worker for TARGET_STRUCT_VALUE_RTX: aggregate return values are
   always passed through the register designated by
   VAX_STRUCT_VALUE_REGNUM, regardless of function type or direction.  */
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
1229
1230/* Return true if we use LRA instead of reload pass.  */
1231
static bool
vax_lra_p (void)
{
  /* Controlled by the -mlra command-line option.  */
  return TARGET_LRA;
}
1237
1238/* Output integer move instructions.  */
1239
1240bool
1241vax_maybe_split_dimode_move (rtx *operands)
1242{
1243  return (TARGET_QMATH
1244	  && (!MEM_P (operands[0])
1245	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1246	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1247	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
1248	  && ((CONST_INT_P (operands[1])
1249	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1250	      || GET_CODE (operands[1]) == CONST_DOUBLE));
1251}
1252
1253const char *
1254vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1255		     machine_mode mode)
1256{
1257  rtx hi[3], lo[3];
1258  const char *pattern_hi, *pattern_lo;
1259  bool push_p;
1260
1261  switch (mode)
1262    {
1263    case E_DImode:
1264      if (operands[1] == const0_rtx)
1265	return "clrq %0";
1266      if (TARGET_QMATH && optimize_size
1267	  && (CONST_INT_P (operands[1])
1268	      || GET_CODE (operands[1]) == CONST_DOUBLE))
1269	{
1270	  unsigned HOST_WIDE_INT hval, lval;
1271	  int n;
1272
1273	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
1274	    {
1275	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1276
1277	      /* Make sure only the low 32 bits are valid.  */
1278	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1279	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1280	    }
1281	  else
1282	    {
1283	      lval = INTVAL (operands[1]);
1284	      hval = 0;
1285	    }
1286
1287	  /* Here we see if we are trying to see if the 64bit value is really
1288	     a 6bit shifted some arbitrary amount.  If so, we can use ashq to
1289	     shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1290	     8 bytes - 1 shift byte - 1 short literal byte.  */
1291	  if (lval != 0
1292	      && (n = exact_log2 (lval & (- lval))) != -1
1293	      && (lval >> n) < 64)
1294	    {
1295	      lval >>= n;
1296
1297	      /* On 32bit platforms, if the 6bits didn't overflow into the
1298		 upper 32bit value that value better be 0.  If we have
1299		 overflowed, make sure it wasn't too much.  */
1300	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1301		{
1302		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1303		    n = 0;	/* failure */
1304		  else
1305		    lval |= hval << (32 - n);
1306		}
1307	      /*  If n is 0, then ashq is not the best way to emit this.  */
1308	      if (n > 0)
1309		{
1310		  operands[1] = GEN_INT (lval);
1311		  operands[2] = GEN_INT (n);
1312		  return "ashq %2,%D1,%0";
1313		}
1314#if HOST_BITS_PER_WIDE_INT == 32
1315	    }
1316	  /* On 32bit platforms, if the low 32bit value is 0, checkout the
1317	     upper 32bit value.  */
1318	  else if (hval != 0
1319		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
1320		   && (hval >> n) < 64)
1321	    {
1322	      operands[1] = GEN_INT (hval >> n);
1323	      operands[2] = GEN_INT (n + 32);
1324	      return "ashq %2,%D1,%0";
1325#endif
1326	    }
1327	}
1328
1329      if (vax_maybe_split_dimode_move (operands))
1330	{
1331	  hi[0] = operands[0];
1332	  hi[1] = operands[1];
1333
1334	  split_quadword_operands (insn, SET, hi, lo, 2);
1335
1336	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
1337	  pattern_hi = vax_output_int_move (NULL, hi, SImode);
1338
1339	  /* The patterns are just movl/movl or pushl/pushl then a movq will
1340	     be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1341	     bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1342	     value bytes.  */
1343	  if ((startswith (pattern_lo, "movl")
1344	      && startswith (pattern_hi, "movl"))
1345	      || (startswith (pattern_lo, "pushl")
1346		  && startswith (pattern_hi, "pushl")))
1347	    return "movq %1,%0";
1348
1349	  if (MEM_P (operands[0])
1350	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1351	    {
1352	      output_asm_insn (pattern_hi, hi);
1353	      operands[0] = lo[0];
1354	      operands[1] = lo[1];
1355	      operands[2] = lo[2];
1356	      return pattern_lo;
1357	    }
1358	  else
1359	    {
1360	      output_asm_insn (pattern_lo, lo);
1361	      operands[0] = hi[0];
1362	      operands[1] = hi[1];
1363	      operands[2] = hi[2];
1364	      return pattern_hi;
1365	    }
1366	}
1367      return "movq %1,%0";
1368
1369    case E_SImode:
1370      push_p = push_operand (operands[0], SImode);
1371
1372      if (symbolic_operand (operands[1], SImode))
1373	return push_p ? "pushab %a1" : "movab %a1,%0";
1374
1375      if (operands[1] == const0_rtx)
1376	return push_p ? "pushl %1" : "clrl %0";
1377
1378      if (CONST_INT_P (operands[1])
1379	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1380	{
1381	  HOST_WIDE_INT i = INTVAL (operands[1]);
1382	  int n;
1383	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
1384	    return "mcoml %N1,%0";
1385	  if ((unsigned HOST_WIDE_INT)i < 0x100)
1386	    return "movzbl %1,%0";
1387	  if (i >= -0x80 && i < 0)
1388	    return "cvtbl %1,%0";
1389	  if (optimize_size
1390	      && (n = exact_log2 (i & (-i))) != -1
1391	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1392	    {
1393	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1394	      operands[2] = GEN_INT (n);
1395	      return "ashl %2,%1,%0";
1396	    }
1397	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
1398	    return "movzwl %1,%0";
1399	  if (i >= -0x8000 && i < 0)
1400	    return "cvtwl %1,%0";
1401	}
1402      return push_p ? "pushl %1" : "movl %1,%0";
1403
1404    case E_HImode:
1405      if (CONST_INT_P (operands[1]))
1406	{
1407	  HOST_WIDE_INT i = INTVAL (operands[1]);
1408	  if (i == 0)
1409	    return "clrw %0";
1410	  else if ((unsigned HOST_WIDE_INT)i < 64)
1411	    return "movw %1,%0";
1412	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1413	    return "mcomw %H1,%0";
1414	  else if ((unsigned HOST_WIDE_INT)i < 256)
1415	    return "movzbw %1,%0";
1416	  else if (i >= -0x80 && i < 0)
1417	    return "cvtbw %1,%0";
1418	}
1419      return "movw %1,%0";
1420
1421    case E_QImode:
1422      if (CONST_INT_P (operands[1]))
1423	{
1424	  HOST_WIDE_INT i = INTVAL (operands[1]);
1425	  if (i == 0)
1426	    return "clrb %0";
1427	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1428	    return "mcomb %B1,%0";
1429	}
1430      return "movb %1,%0";
1431
1432    default:
1433      gcc_unreachable ();
1434    }
1435}
1436
1437/* Output integer add instructions.
1438
1439   The space-time-opcode tradeoffs for addition vary by model of VAX.
1440
1441   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.
1443
1444   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1445   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1446   a register is used in an address too soon after it is set.
1447   Compromise by using movab only when it is shorter than the add
1448   or the base register in the address is one of sp, ap, and fp,
1449   which are not modified very often.  */
1450
const char *
vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;
	bool sub;

	/* Debugging aid, disabled by the "&& 0".  */
	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	/* OPERANDS receive the high words, LOW the low words.  */
	split_quadword_operands (insn, PLUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[2], SImode));
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[0], SImode));
#endif

	    /* No reason to add a 0 to the low part and thus no carry, so just
	       emit the appropriate add/sub instruction.  */
	    if (low[2] == const0_rtx)
	      return vax_output_int_add (NULL, operands, SImode);

	    /* Are we doing addition or subtraction?  */
	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

	    /* We can't use vax_output_int_add since some of the patterns
	       don't modify the carry bit.  */
	    if (sub)
	      {
		if (low[2] == constm1_rtx)
		  pattern = "decl %0";
		else
		  pattern = "subl2 $%n2,%0";
	      }
	    else
	      {
		if (low[2] == const1_rtx)
		  pattern = "incl %0";
		else
		  pattern = "addl2 %2,%0";
	      }
	    output_asm_insn (pattern, low);

	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
	       two 32bit parts, we complement each and then add one to
	       low part.  We know that the low part can't overflow since
	       it's value can never be 0.  */
	    if (sub)
		return "sbwc %N2,%0";
	    return "adwc %2,%0";
	  }

	/* Add low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	/* Should examine operand, punt if not POST_INC.  */
	      pattern = "tstl %0", carry = 0;
	    else if (low[2] == const1_rtx)
	      pattern = "incl %0";
	    else
	      pattern = "addl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == const0_rtx)
	      pattern = "movl %1,%0", carry = 0;
	    else
	      pattern = "addl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (!carry)
	  /* If CARRY is 0, we don't have any carry value to worry about.  */
	  return get_insn_template (CODE_FOR_addsi3, insn);
	/* %0 = C + %1 + %2 */
	if (!rtx_equal_p (operands[0], operands[1]))
	  output_asm_insn ((operands[1] == const0_rtx
			    ? "clrl %0"
			    : "movl %1,%0"), operands);
	return "adwc %2,%0";
      }

    case E_SImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incl %0";
	  if (operands[2] == constm1_rtx)
	    return "decl %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subl2 $%n2,%0";
	  /* Registers r12-r15 (ap, fp, sp, pc) are rarely modified, so
	     movab through them is safe from the stall described above.  */
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	      && REG_P (operands[1])
	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
		   || REGNO (operands[1]) > 11))
	    return "movab %c2(%1),%0";
	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
	    return "movab %a2[%0],%0";
	  return "addl2 %2,%0";
	}

      if (rtx_equal_p (operands[0], operands[2]))
	{
	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
	    return "movab %a1[%0],%0";
	  return "addl2 %1,%0";
	}

      if (CONST_INT_P (operands[2])
	  && INTVAL (operands[2]) < 32767
	  && INTVAL (operands[2]) > -32768
	  && REG_P (operands[1])
	  && push_operand (operands[0], SImode))
	return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	  && REG_P (operands[1])
	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
	       || REGNO (operands[1]) > 11))
	return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
	return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a2[%1]";
	  return "movab %a2[%1],%0";
	}

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1[%2]";
	  return "movab %a1[%2],%0";
	}

      if (flag_pic && REG_P (operands[0])
	  && symbolic_operand (operands[2], SImode))
	return "movab %a2,%0;addl2 %1,%0";

      /* Debugging aid for unexpected symbolic operands under PIC.  */
      if (flag_pic
	  && (symbolic_operand (operands[1], SImode)
	      || symbolic_operand (operands[2], SImode)))
	debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case E_HImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incw %0";
	  if (operands[2] == constm1_rtx)
	    return "decw %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subw2 $%n2,%0";
	  return "addw2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case E_QImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incb %0";
	  if (operands[2] == constm1_rtx)
	    return "decb %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subb2 $%n2,%0";
	  return "addb2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}
1662
/* Output integer subtract instructions; only DImode is handled here
   (narrower modes are emitted directly from the machine description).  */
const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	/* Debugging aid, disabled by the "&& 0".  */
	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	/* OPERANDS receive the high words, LOW the low words.  */
	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    /* PATTERN may stay null here (nothing to emit, no carry).  */
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}
1737
1738/* True if X is an rtx for a constant that is a valid address.  */
1739
1740bool
1741legitimate_constant_address_p (rtx x)
1742{
1743  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1744	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1745    return true;
1746  if (GET_CODE (x) != CONST)
1747    return false;
1748#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1749  if (flag_pic
1750      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1751      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1752    return false;
1753#endif
1754   return true;
1755}
1756
/* The macros defined here are used by the address legitimization
   predicates below (indirectable_address_p, index_term_p, and
   friends), all in service of vax_legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1768
1769#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1770
1771/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1772   are no SYMBOL_REFs for external symbols present.  */
1773
static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  /* A bare symbol is fine unless it is a PIC reference to a non-local
     symbol that would be used indirectly.  */
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  /* `symbol+offset': under PIC only local symbols may carry an offset.  */
  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}
1787
1788#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1789
static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  /* Without NO_EXTERNAL_INDIRECT_ADDRESS any constant address may be
     indirected, so this degenerates to plain CONSTANT_ADDRESS_P.  */
  return CONSTANT_ADDRESS_P (x);
}
1795
1796#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1797
1798/* True if X is an address which can be indirected.  External symbols
1799   could be in a sharable image library, so we disallow those.  */
1800
1801static bool
1802indirectable_address_p (rtx x, bool strict, bool indirect)
1803{
1804  if (indirectable_constant_address_p (x, indirect)
1805      || BASE_REGISTER_P (x, strict))
1806    return true;
1807  if (GET_CODE (x) != PLUS
1808      || !BASE_REGISTER_P (XEXP (x, 0), strict)
1809      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1810    return false;
1811  return indirectable_constant_address_p (XEXP (x, 1), indirect);
1812}
1813
1814/* Return true if x is a valid address not using indexing.
1815   (This much is the easy part.)  */
1816static bool
1817nonindexed_address_p (rtx x, bool strict)
1818{
1819  rtx xfoo0;
1820  if (REG_P (x))
1821    {
1822      if (! reload_in_progress
1823	  || reg_equiv_mem (REGNO (x)) == 0
1824	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1825	return true;
1826    }
1827  if (indirectable_constant_address_p (x, false))
1828    return true;
1829  if (indirectable_address_p (x, strict, false))
1830    return true;
1831  xfoo0 = XEXP (x, 0);
1832  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1833    return true;
1834  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1835      && BASE_REGISTER_P (xfoo0, strict))
1836    return true;
1837  return false;
1838}
1839
1840/* True if PROD is either a reg times size of mode MODE and MODE is less
1841   than or equal 8 bytes, or just a reg if MODE is one byte.  */
1842
static bool
index_term_p (rtx prod, machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;
  bool log_p;

  /* For byte-sized modes the scale factor is 1, so a bare register
     suffices.  */
  if (GET_MODE_SIZE (mode) == 1)
    return BASE_REGISTER_P (prod, strict);

  if ((GET_CODE (prod) != MULT && GET_CODE (prod) != ASHIFT)
      || GET_MODE_SIZE (mode) > 8)
    return false;

  /* An ASHIFT scales by a log2 shift count rather than a multiplier.  */
  log_p = GET_CODE (prod) == ASHIFT;
  xfoo0 = XEXP (prod, 0);
  xfoo1 = XEXP (prod, 1);

  /* NOTE(review): `1 << INTVAL (...)' assumes a small shift count; a
     huge count would be undefined behavior on the host.  Presumably
     such addresses never reach here -- worth confirming.  */
  if (CONST_INT_P (xfoo0)
      && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo0) : INTVAL (xfoo0))
      && INDEX_REGISTER_P (xfoo1, strict))
    return true;

  if (CONST_INT_P (xfoo1)
      && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo1) : INTVAL (xfoo1))
      && INDEX_REGISTER_P (xfoo0, strict))
    return true;

  return false;
}
1872
1873/* Return true if X is the sum of a register
1874   and a valid index term for mode MODE.  */
1875static bool
1876reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1877{
1878  rtx xfoo0, xfoo1;
1879
1880  if (GET_CODE (x) != PLUS)
1881    return false;
1882
1883  xfoo0 = XEXP (x, 0);
1884  xfoo1 = XEXP (x, 1);
1885
1886  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1887    return true;
1888
1889  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1890    return true;
1891
1892  return false;
1893}
1894
1895/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
static bool
indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
{
  /* XFOO0 must supply the constant displacement part.  */
  if (!CONSTANT_ADDRESS_P (xfoo0))
    return false;
  /* <constant>(Rn): under PIC only byte-mode (unscaled) accesses are
     allowed in this form.  */
  if (BASE_REGISTER_P (xfoo1, strict))
    return !flag_pic || mode == QImode;
  /* Under PIC a symbolic displacement cannot be combined with an
     indexed base.  */
  if (flag_pic && symbolic_operand (xfoo0, SImode))
    return false;
  return reg_plus_index_p (xfoo1, mode, strict);
}
1907
1908/* legitimate_address_p returns true if it recognizes an RTL expression "x"
1909   that is a valid memory address for an instruction.
1910   The MODE argument is the machine mode for the MEM expression
1911   that wants to use this address.  */
1912bool
1913vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1914{
1915  rtx xfoo0, xfoo1;
1916
1917  if (nonindexed_address_p (x, strict))
1918    return true;
1919
1920  if (GET_CODE (x) != PLUS)
1921    return false;
1922
1923  /* Handle <address>[index] represented with index-sum outermost */
1924
1925  xfoo0 = XEXP (x, 0);
1926  xfoo1 = XEXP (x, 1);
1927
1928  if (index_term_p (xfoo0, mode, strict)
1929      && nonindexed_address_p (xfoo1, strict))
1930    return true;
1931
1932  if (index_term_p (xfoo1, mode, strict)
1933      && nonindexed_address_p (xfoo0, strict))
1934    return true;
1935
1936  /* Handle offset(reg)[index] with offset added outermost */
1937
1938  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1939      || indexable_address_p (xfoo1, xfoo0, mode, strict))
1940    return true;
1941
1942  return false;
1943}
1944
1945/* Return true if x (a legitimate address expression) has an effect that
1946   depends on the machine mode it is used for.  On the VAX, the predecrement
1947   and postincrement address depend thus (the amount of decrement or
1948   increment being the length of the operand) and all indexed address depend
1949   thus (because the index scale factor is the length of the operand).  */
1950
1951static bool
1952vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1953{
1954  rtx xfoo0, xfoo1;
1955
1956  /* Auto-increment cases are now dealt with generically in recog.cc.  */
1957  if (GET_CODE (x) != PLUS)
1958    return false;
1959
1960  xfoo0 = XEXP (x, 0);
1961  xfoo1 = XEXP (x, 1);
1962
1963  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1964    return false;
1965  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1966    return false;
1967  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1968    return false;
1969  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1970    return false;
1971
1972  return true;
1973}
1974
/* Rewrite a DImode memory operand whose address cannot be used by the
   add/sub-with-carry patterns: load the address into a fresh register
   and access the operand through that register instead.  Returns the
   (possibly unchanged) operand.  */
static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      /* Under PIC, split `symbol+offset' so only the symbol goes
	 through the register; re-apply the offset afterwards.  */
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
1997
/* Expand a DImode add (CODE == PLUS) or subtract (CODE == MINUS) of
   OPERANDS[1] and OPERANDS[2] into OPERANDS[0], emitting the insn
   sequence directly.  */
void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  /* Nonzero when the low word of the second source is zero, so only
     the high words need real arithmetic.  */
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus operands are commutative) and if there is one
     addend that duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      /* Only the high words differ: copy the source if needed, then do
	 a single SImode operation on the high words.  */
      if (!rtx_equal_p (operands[0], operands[1])
	  && (REG_P (operands[0]) && MEM_P (operands[1])))
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = operands[0];
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
			  operand_subword (operands[1], 0, 0, DImode));

      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
				 operand_subword (operands[1], 1, 0, DImode),
				 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If we are adding a value to itself, that's really a multiply by 2,
	 and that's just a left shift by 1.  If subtracting, it's just 0.  */
      if (rtx_equal_p (operands[1], operands[2]))
	{
	  if (code == PLUS)
	    emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
	  else
	    emit_move_insn (operands[0], const0_rtx);
	  return;
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx
	 because fixup will return an equivalent rtx but not an equal one.  */

      if (rtx_equal_p (operands[0], operands[1]))
	operands[1] = operands[0];
      else
	operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
	operands[2] = operands[0];
      else
	operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are adding or subtracting 0, then this is a move.  */
      if (code == PLUS && operands[1] == const0_rtx)
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}
      if (operands[2] == const0_rtx)
	{
	  emit_move_insn (operands[0], operands[1]);
	  return;
	}

      /* If we are subtracting not from ourselves [d = a - b], and because the
	 carry ops are two operand only, we would need to do a move prior to
	 the subtract.  And if d == b, we would need a temp otherwise
	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
	 no temp is needed.

	 If we are doing addition, since the carry ops are two operand, if
	 we aren't adding to ourselves, move the first addend to the
	 destination first.  */

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
	{
	  if (code == MINUS && CONSTANT_P (operands[1]))
	    {
	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
	      code = PLUS;
	      gen_insn = gen_adcdi3;
	      operands[2] = operands[1];
	      operands[1] = operands[0];
	    }
	  else
	    emit_move_insn (operands[0], operands[1]);
	}

      /* Subtracting a constant will have been rewritten to an addition of the
	 negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}
2133
2134/* Output assembler code for a block containing the constant parts
2135   of a trampoline, leaving space for the variable parts.  */
2136
/* On the VAX, the trampoline contains an entry mask and two instructions:
     .word NN
     movl $STATIC,r0   (store the function's static chain)
     jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
2141
static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Emit the constant skeleton of the trampoline; vax_trampoline_init
     fills in the variable fields (entry mask, static chain value and
     target address) at run time.  Layout matches the comment above:
     a .word entry mask followed by "movl $STATIC,rN" and
     "jmp *$FUNCTION".  */

  /* Bytes 0-1: entry-mask placeholder, copied from the real function.  */
  assemble_aligned_integer (2, const0_rtx);
  /* Bytes 2-3: movl opcode with immediate source addressing
     (little-endian 0xd0 0x8f).  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* Bytes 4-7: placeholder for the static chain value (movl's
     immediate operand).  */
  assemble_aligned_integer (4, const0_rtx);
  /* Byte 8: destination operand — register mode (0x5n) selecting the
     static chain register.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* Bytes 9-10: jmp opcode with absolute (immediate deferred)
     addressing (little-endian 0x17 0x9f).  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* Bytes 11-14: placeholder for the target code address.  */
  assemble_aligned_integer (4, const0_rtx);
}
2152
2153/* We copy the register-mask from the function's pure code
2154   to the start of the trampoline.  */
2155
static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  /* Fill in the variable parts of the trampoline M_TRAMP for a call to
     FNDECL with static chain CXT.  The byte offsets written here (0, 4
     and 11) correspond to the placeholders left by
     vax_asm_trampoline_template.  */
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Copy the constant template into place first.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Offset 0: copy the entry mask word from the start of the target
     function's own code.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Offset 4: store the static chain value (movl's immediate).  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Offset 11: store the jump target — FNADDR + 2 skips the target's
     entry mask word, since the trampoline already supplied one.  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* The trampoline was written as data; synchronize the instruction
     stream before it can be executed.  */
  emit_insn (gen_sync_istream ());
}
2174
2175/* Value is the number of bytes of arguments automatically
2176   popped when returning from a subroutine call.
2177   FUNDECL is the declaration node of the function (as a tree),
2178   FUNTYPE is the data type of the function (as a tree),
2179   or for a library call it is an identifier node for the subroutine name.
2180   SIZE is the number of bytes of arguments passed on the stack.
2181
2182   On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2183
2184static poly_int64
2185vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2186		      tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
2187{
2188  return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
2189}
2190
2191/* Implement TARGET_FUNCTION_ARG.  On the VAX all args are pushed.  */
2192
2193static rtx
2194vax_function_arg (cumulative_args_t, const function_arg_info &)
2195{
2196  return NULL_RTX;
2197}
2198
2199/* Update the data in CUM to advance over argument ARG.  */
2200
2201static void
2202vax_function_arg_advance (cumulative_args_t cum_v,
2203			  const function_arg_info &arg)
2204{
2205  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2206
2207  *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
2208}
2209
2210static HOST_WIDE_INT
2211vax_starting_frame_offset (void)
2212{
2213  /* On ELF targets, reserve the top of the stack for exception handler
2214     stackadj value.  */
2215  return TARGET_ELF ? -4 : 0;
2216}
2217
2218