1/* Subroutines for insn-output.c for VAX.
2   Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#define IN_TARGET_CODE 1
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "target.h"
27#include "rtl.h"
28#include "tree.h"
29#include "stringpool.h"
30#include "attribs.h"
31#include "df.h"
32#include "memmodel.h"
33#include "tm_p.h"
34#include "optabs.h"
35#include "regs.h"
36#include "emit-rtl.h"
37#include "calls.h"
38#include "varasm.h"
39#include "conditions.h"
40#include "output.h"
41#include "expr.h"
42#include "reload.h"
43#include "builtins.h"
44
45/* This file should be included last.  */
46#include "target-def.h"
47
48static void vax_option_override (void);
49static void vax_init_builtins (void);
50static bool vax_legitimate_address_p (machine_mode, rtx, bool);
51static void vax_file_start (void);
52static void vax_init_libfuncs (void);
53static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
54				 HOST_WIDE_INT, tree);
55static int vax_address_cost_1 (rtx);
56static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
57static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
58static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
59static void vax_function_arg_advance (cumulative_args_t,
60				      const function_arg_info &);
61static rtx vax_struct_value_rtx (tree, int);
62static void vax_asm_trampoline_template (FILE *);
63static void vax_trampoline_init (rtx, tree, rtx);
64static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
65static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
66static HOST_WIDE_INT vax_starting_frame_offset (void);
67static int vax_bitfield_may_trap_p (const_rtx, unsigned);
68
69/* Initialize the GCC target structure.  */
70#undef TARGET_ASM_ALIGNED_HI_OP
71#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
72
73#undef TARGET_ASM_FILE_START
74#define TARGET_ASM_FILE_START vax_file_start
75#undef TARGET_ASM_FILE_START_APP_OFF
76#define TARGET_ASM_FILE_START_APP_OFF true
77
78#undef TARGET_INIT_LIBFUNCS
79#define TARGET_INIT_LIBFUNCS vax_init_libfuncs
80
81#undef TARGET_INIT_BUILTINS
82#define TARGET_INIT_BUILTINS vax_init_builtins
83
84#undef TARGET_ASM_OUTPUT_MI_THUNK
85#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
86#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
87#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
88
89#undef TARGET_RTX_COSTS
90#define TARGET_RTX_COSTS vax_rtx_costs
91#undef TARGET_ADDRESS_COST
92#define TARGET_ADDRESS_COST vax_address_cost
93
94#undef TARGET_PROMOTE_PROTOTYPES
95#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
96
97#undef TARGET_FUNCTION_ARG
98#define TARGET_FUNCTION_ARG vax_function_arg
99#undef TARGET_FUNCTION_ARG_ADVANCE
100#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
101
102#undef TARGET_STRUCT_VALUE_RTX
103#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
104
105#undef TARGET_LRA_P
106#define TARGET_LRA_P hook_bool_void_false
107
108#undef TARGET_LEGITIMATE_ADDRESS_P
109#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
110#undef TARGET_MODE_DEPENDENT_ADDRESS_P
111#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
112
113#undef TARGET_FRAME_POINTER_REQUIRED
114#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
115
116#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
117#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
118#undef TARGET_TRAMPOLINE_INIT
119#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
120#undef TARGET_RETURN_POPS_ARGS
121#define TARGET_RETURN_POPS_ARGS vax_return_pops_args
122
123#undef TARGET_OPTION_OVERRIDE
124#define TARGET_OPTION_OVERRIDE vax_option_override
125
126#if TARGET_ELF
127#undef TARGET_BINDS_LOCAL_P
128#define TARGET_BINDS_LOCAL_P vax_elf_binds_local_p
129
/* Implement TARGET_BINDS_LOCAL_P for ELF targets.  A symbol binds
   locally unless we are generating shared or PIC code, in which case
   the generic overridability analysis decides.  */
static bool
vax_elf_binds_local_p (const_tree exp)
{
  return default_binds_local_p_3 (exp, (flag_shlib | flag_pic) != 0,
				  true, false, false);
}
136#endif
137
138#undef TARGET_STARTING_FRAME_OFFSET
139#define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset
140
141#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
142#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
143
144#undef TARGET_BITFIELD_MAY_TRAP_P
145#define TARGET_BITFIELD_MAY_TRAP_P vax_bitfield_may_trap_p
146
/* The one instance of the target hook vector, built from the TARGET_*
   overrides above plus the defaults in TARGET_INITIALIZER.  */
struct gcc_target targetm = TARGET_INITIALIZER;
148
149/* Set global variables as needed for the options enabled.  */
150
static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    /* Use the VAX G_float layout for DFmode instead of the default
       D_float.  */
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  /* Let the OS-specific subtarget adjust options last.  */
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
162/* Implement the TARGET_INIT_BUILTINS target hook.  */
163
static void
vax_init_builtins (void)
{
  /* The generic VAX target has no builtins of its own; any come from
     the subtarget, if it defines this hook.  */
#ifdef SUBTARGET_INIT_BUILTINS
  SUBTARGET_INIT_BUILTINS;
#endif
}
171
172static void
173vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
174{
175  rtx x;
176
177  x = plus_constant (Pmode, frame_pointer_rtx, offset);
178  x = gen_rtx_MEM (SImode, x);
179  x = gen_rtx_SET (x, src);
180  add_reg_note (insn, REG_CFA_OFFSET, x);
181}
182
183/* Generate the assembly code for function entry.  FILE is a stdio
184   stream to output the code to.  SIZE is an int: how many units of
185   temporary storage to allocate.
186
187   Refer to the array `regs_ever_live' to determine which registers to
188   save; `regs_ever_live[I]' is nonzero if register number I is ever
189   used in the function.  This function is responsible for knowing
190   which registers should not be saved even if used.  */
191
void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  /* Build the procedure entry mask: one bit per call-saved register
     that is live anywhere in the function.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      {
        mask |= 1 << regno;
      }

  /* The EH return data registers are written by the epilogue, so they
     must be saved by the entry mask even if not otherwise live.  */
  if (crtl->calls_eh_return)
    {
      mask |= 0
	| ( 1 << EH_RETURN_DATA_REGNO(0) )
	| ( 1 << EH_RETURN_DATA_REGNO(1) )
	| ( 1 << EH_RETURN_DATA_REGNO(2) )
	| ( 1 << EH_RETURN_DATA_REGNO(3) )
	;
    }

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is follows:

		<- AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	condition handler	<- CFA, FP, SP
	  (initially zero)

     The rest of the prologue will adjust the SP for the local frame.  */

#ifdef notyet
  /*
   * We can't do this, the dwarf code asserts and we don't have yet a
   * way to get to the psw
   */
  vax_add_reg_cfa_offset (insn, 4, gen_rtx_REG (Pmode, PSW_REGNUM));
#endif
  /* Tell the unwinder where the CALLS instruction saved AP, FP and
     the return address (offsets match the frame picture above).  */
  vax_add_reg_cfa_offset (insn, 8, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 16, pc_rtx);

  offset = 5 * UNITS_PER_WORD;	/* PSW, AP &c */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 1 * UNITS_PER_WORD;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes to be interpreted properly.  */
  /* The RTX here must match the instantiation of the CFA vreg */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx,
			       FRAME_POINTER_CFA_OFFSET(current_function_decl)));
  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
274
275/* When debugging with stabs, we want to output an extra dummy label
276   so that gas can distinguish between D_float and G_float prior to
277   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  /* Emit a dummy label whose name encodes the selected double format
     (ASM_DOUBLE_CHAR is 'd' or 'g'), for the benefit of gas when
     producing stabs debug info.  */
  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}
286
287/* We can use the BSD C library routines for the libgcc calls that are
288   still generated, since that's what they boil down to anyways.  When
289   ELF, avoid the user's namespace.  */
290
291static void
292vax_init_libfuncs (void)
293{
294  if (TARGET_BSD_DIVMOD)
295    {
296      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
297      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
298    }
299}
300
/* Split the N DImode OPERANDS into SImode word halves: LOW[I] receives
   word 0 and OPERANDS[I] word 1 of each quadword, with special care
   for autoincrement addresses and, when optimizing for size, for
   memory operands whose dying base register can be recycled as a
   POST_INC.  */
302
static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Autoincrement addressing: both word accesses go through
	     the same side-effect address, so use one MEM for both
	     halves.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The base register dies in this insn, so it can be reused
	     as a POST_INC for the low word; after the increment the
	     plain (reg) MEM addresses the high word.  NOTE(review):
	     this assumes the low word is accessed first — confirm
	     against the insn patterns that call this.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* General case: split via operand_subword.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
339
/* Print to FILE the VAX assembler syntax for the address ADDR,
   combining up to a base register, an index register (printed as
   [reg]) and a displacement or indirect memory offset.  */
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      /* Indirect addressing: "*" prefix, then the inner address.  */
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      /* First level of the PLUS: peel off one component.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	{
	   debug_rtx (orig);
	   gcc_unreachable ();
	}

      /* Classify the remainder: a lone REG, a MULT, or a nested PLUS
	 holding the last two components.  */
      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else if (GET_CODE (addr) == PLUS)
	{
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
		                            INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      /* NOTE(review): this assignment discards the combined
		 offset computed just above — looks suspicious but
		 matches long-standing behavior; confirm before
		 changing.  */
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else if (GET_CODE (XEXP (addr, 0)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 0);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      /* NOTE(review): same discarded combination as above.  */
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else if (GET_CODE (XEXP (addr, 1)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 1);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }
	}
      else
	{
	  debug_rtx (orig);
	  gcc_unreachable ();
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      if (ireg)
		{
		  debug_rtx (orig);
		  gcc_unreachable ();
		}
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      /* Emit the pieces: offset first, then (base), then [index].  */
      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (VOIDmode, offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  if (! REG_P (ireg))
	    {
	      debug_rtx (orig);
	      output_operand_lossage ("non-register index expression");
	    }
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      gcc_assert (! REG_P(addr));
      output_addr_const (file, addr);
    }
}
575
/* Print operand X to FILE, formatted according to the single-letter
   template code CODE; CODE 0 means default formatting.  Most codes
   apply arithmetic tweaks to CONST_INT operands so the .md templates
   can reuse one operand in several encodings.  */
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);	/* 'd' or 'g' double suffix.  */
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);	/* condition mnemonic */
  else if (code == 'C')
    fputs (rev_cond_name (x), file);	/* reversed condition */
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)))	/* low 16 bits, complemented */;
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)))	/* low 8 bits, complemented */;
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ((~0) << (INTVAL (x))))	/* mask of bits above INTVAL */;
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* F_float immediate: $0f<decimal>.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* D_float/G_float immediate, per ASM_DOUBLE_CHAR.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else if (GET_CODE (x) == SUBREG)
    {
      debug_rtx (x);
      output_operand_lossage ("SUBREG operand");
    }
  else
    {
      /* Remaining constants are printed as immediates.  */
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
642
643const char *
644cond_name (rtx op)
645{
646  switch (GET_CODE (op))
647    {
648    case NE:
649      return "neq";
650    case EQ:
651      return "eql";
652    case GE:
653      return "geq";
654    case GT:
655      return "gtr";
656    case LE:
657      return "leq";
658    case LT:
659      return "lss";
660    case GEU:
661      return "gequ";
662    case GTU:
663      return "gtru";
664    case LEU:
665      return "lequ";
666    case LTU:
667      return "lssu";
668
669    default:
670      gcc_unreachable ();
671    }
672}
673
674const char *
675rev_cond_name (rtx op)
676{
677  switch (GET_CODE (op))
678    {
679    case EQ:
680      return "neq";
681    case NE:
682      return "eql";
683    case LT:
684      return "geq";
685    case LE:
686      return "gtr";
687    case GT:
688      return "leq";
689    case GE:
690      return "lss";
691    case LTU:
692      return "gequ";
693    case LEU:
694      return "gtru";
695    case GTU:
696      return "lequ";
697    case GEU:
698      return "lssu";
699
700    default:
701      gcc_unreachable ();
702    }
703}
704
/* Return true if C is a CONST_DOUBLE the VAX can encode cheaply as a
   floating literal: a shared tiny constant, a power of two from 1 to
   64, or the exact reciprocal of such a power.  */
static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  const REAL_VALUE_TYPE *r;
  REAL_VALUE_TYPE s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  /* NOTE(review): rtl.h's CONST0_RTX uses const_tiny_rtx[0][(int) mode]
     — the index order here looks transposed; verify against the
     declaration of const_tiny_rtx before relying on this check.  */
  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  r = CONST_DOUBLE_REAL_VALUE (c);

  /* Test 2^0 .. 2^6 and their exact inverses.  */
  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (real_equal (r, &s))
	return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (real_equal (r, &s))
	return true;
    }
  return false;
}
740
741
742/* Return the cost in cycles of a memory address, relative to register
743   indirect.
744
745   Each of the following adds the indicated number of cycles:
746
747   1 - symbolic address
748   1 - pre-decrement
749   1 - indexing and/or offset(register)
750   2 - indirect */
751
752
static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  /* Walk the address, accumulating one flag/cost per feature seen.
     PLUS and MEM recurse via the RESTART label rather than calls.  */
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash one addend and continue with the other; up to two
	 pending addends are supported (three-part addresses).  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
821
822static int
823vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
824		  addr_space_t as ATTRIBUTE_UNUSED,
825		  bool speed ATTRIBUTE_UNUSED)
826{
827  return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
828}
829
830/* Cost of an expression on a VAX.  This version has costs tuned for the
831   CVAX chip (found in the VAX 3 series) with comments for variations on
832   other models.
833
834   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
835   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
836   costs on a per cpu basis.  */
837
static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  /* AND complements its constant; cheap if the complement fits
	     the 6-bit literal format.  */
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case E_DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case E_SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case E_DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case E_SImode:
	case E_HImode:
	case E_QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  /* Cost only the second operand below: skip the constant.  */
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
1111
1112/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1113   Used for C++ multiple inheritance.
1114	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
1115	addl2	$DELTA, 4(ap)	#adjust first argument
1116	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
1117*/
1118
1119static void
1120vax_output_mi_thunk (FILE * file,
1121		     tree thunk ATTRIBUTE_UNUSED,
1122		     HOST_WIDE_INT delta,
1123		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1124		     tree function)
1125{
1126  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));
1127
1128  assemble_start_function (thunk, fnname);
1129  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1130  asm_fprintf (file, ",4(%Rap)\n");
1131  fprintf (file, "\tjmp ");
1132  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
1133  fprintf (file, "+2\n");
1134  assemble_end_function (thunk, fnname);
1135}
1136
1137static rtx
1138vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1139		      int incoming ATTRIBUTE_UNUSED)
1140{
1141  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1142}
1143
1144/* Worker function for NOTICE_UPDATE_CC.  */
1145
void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      /* A call clobbers the condition codes entirely.  */
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      /* Only integer negation gets CC_NO_OVERFLOW; for
		 floating-point negation the flags stay cleared.  */
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case CTZ:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  /* Record what was set so a later compare against it can
	     be elided.  */
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      /* Only the first element of the PARALLEL is inspected.  */
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* If the destination register is mentioned in the source, the
     recorded source value is stale after the insn executes.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  /* Conservatively drop value2 when both recorded values are memory
     references, since they might alias.  */
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1214
1215/* Output integer move instructions.  */
1216
1217const char *
1218vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1219		     machine_mode mode)
1220{
1221  rtx hi[3], lo[3];
1222  const char *pattern_hi, *pattern_lo;
1223
1224  switch (mode)
1225    {
1226    case E_DImode:
1227      if (operands[1] == const0_rtx)
1228	return "clrq %0";
1229      if (TARGET_QMATH && optimize_size
1230	  && (CONST_INT_P (operands[1])
1231	      || GET_CODE (operands[1]) == CONST_DOUBLE))
1232	{
1233	  unsigned HOST_WIDE_INT hval, lval;
1234	  int n;
1235
1236	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
1237	    {
1238	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1239
1240	      /* Make sure only the low 32 bits are valid.  */
1241	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1242	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1243	    }
1244	  else
1245	    {
1246	      lval = INTVAL (operands[1]);
1247	      hval = 0;
1248	    }
1249
1250	  /* Here we see if we are trying to see if the 64bit value is really
1251	     a 6bit shifted some arbitrary amount.  If so, we can use ashq to
1252	     shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1253	     8 bytes - 1 shift byte - 1 short literal byte.  */
1254	  if (lval != 0
1255	      && (n = exact_log2 (lval & (- lval))) != -1
1256	      && (lval >> n) < 64)
1257	    {
1258	      lval >>= n;
1259
1260	      /* On 32bit platforms, if the 6bits didn't overflow into the
1261		 upper 32bit value that value better be 0.  If we have
1262		 overflowed, make sure it wasn't too much.  */
1263	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1264		{
1265		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1266		    n = 0;	/* failure */
1267		  else
1268		    lval |= hval << (32 - n);
1269		}
1270	      /*  If n is 0, then ashq is not the best way to emit this.  */
1271	      if (n > 0)
1272		{
1273		  operands[1] = GEN_INT (lval);
1274		  operands[2] = GEN_INT (n);
1275		  return "ashq %2,%D1,%0";
1276		}
1277#if HOST_BITS_PER_WIDE_INT == 32
1278	    }
1279	  /* On 32bit platforms, if the low 32bit value is 0, checkout the
1280	     upper 32bit value.  */
1281	  else if (hval != 0
1282		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
1283		   && (hval >> n) < 64)
1284	    {
1285	      operands[1] = GEN_INT (hval >> n);
1286	      operands[2] = GEN_INT (n + 32);
1287	      return "ashq %2,%D1,%0";
1288#endif
1289	    }
1290	}
1291
1292      if (TARGET_QMATH
1293	  && (!MEM_P (operands[0])
1294	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1295	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1296	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
1297	  && ((CONST_INT_P (operands[1])
1298	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1299	      || GET_CODE (operands[1]) == CONST_DOUBLE))
1300	{
1301	  hi[0] = operands[0];
1302	  hi[1] = operands[1];
1303
1304	  split_quadword_operands (insn, SET, hi, lo, 2);
1305
1306	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
1307	  pattern_hi = vax_output_int_move (NULL, hi, SImode);
1308
1309	  /* The patterns are just movl/movl or pushl/pushl then a movq will
1310	     be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1311	     bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1312	     value bytes.  */
1313	  if ((!strncmp (pattern_lo, "movl", 4)
1314	      && !strncmp (pattern_hi, "movl", 4))
1315	      || (!strncmp (pattern_lo, "pushl", 5)
1316		  && !strncmp (pattern_hi, "pushl", 5)))
1317	    return "movq %1,%0";
1318
1319	  if (MEM_P (operands[0])
1320	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1321	    {
1322	      output_asm_insn (pattern_hi, hi);
1323	      operands[0] = lo[0];
1324	      operands[1] = lo[1];
1325	      operands[2] = lo[2];
1326	      return pattern_lo;
1327	    }
1328	  else
1329	    {
1330	      output_asm_insn (pattern_lo, lo);
1331	      operands[0] = hi[0];
1332	      operands[1] = hi[1];
1333	      operands[2] = hi[2];
1334	      return pattern_hi;
1335	    }
1336	}
1337      return "movq %1,%0";
1338
1339    case E_SImode:
1340      if (symbolic_operand (operands[1], SImode))
1341	{
1342	  if (push_operand (operands[0], SImode))
1343	    return "pushab %a1";
1344	  return "movab %a1,%0";
1345	}
1346
1347      if (operands[1] == const0_rtx)
1348	{
1349	  if (push_operand (operands[0], SImode))
1350	    return "pushl %1";
1351	  return "clrl %0";
1352	}
1353
1354      if (CONST_INT_P (operands[1])
1355	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1356	{
1357	  HOST_WIDE_INT i = INTVAL (operands[1]);
1358	  int n;
1359	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
1360	    return "mcoml %N1,%0";
1361	  if ((unsigned HOST_WIDE_INT)i < 0x100)
1362	    return "movzbl %1,%0";
1363	  if (i >= -0x80 && i < 0)
1364	    return "cvtbl %1,%0";
1365	  if (optimize_size
1366	      && (n = exact_log2 (i & (-i))) != -1
1367	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1368	    {
1369	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1370	      operands[2] = GEN_INT (n);
1371	      return "ashl %2,%1,%0";
1372	    }
1373	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
1374	    return "movzwl %1,%0";
1375	  if (i >= -0x8000 && i < 0)
1376	    return "cvtwl %1,%0";
1377	}
1378      if (push_operand (operands[0], SImode))
1379	return "pushl %1";
1380      return "movl %1,%0";
1381
1382    case E_HImode:
1383      if (CONST_INT_P (operands[1]))
1384	{
1385	  HOST_WIDE_INT i = INTVAL (operands[1]);
1386	  if (i == 0)
1387	    return "clrw %0";
1388	  else if ((unsigned HOST_WIDE_INT)i < 64)
1389	    return "movw %1,%0";
1390	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1391	    return "mcomw %H1,%0";
1392	  else if ((unsigned HOST_WIDE_INT)i < 256)
1393	    return "movzbw %1,%0";
1394	  else if (i >= -0x80 && i < 0)
1395	    return "cvtbw %1,%0";
1396	}
1397      return "movw %1,%0";
1398
1399    case E_QImode:
1400      if (CONST_INT_P (operands[1]))
1401	{
1402	  HOST_WIDE_INT i = INTVAL (operands[1]);
1403	  if (i == 0)
1404	    return "clrb %0";
1405	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1406	    return "mcomb %B1,%0";
1407	}
1408      return "movb %1,%0";
1409
1410    default:
1411      gcc_unreachable ();
1412    }
1413}
1414
1415/* Output integer add instructions.
1416
1417   The space-time-opcode tradeoffs for addition vary by model of VAX.
1418
1419   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1420   but it not faster on other models.
1421
1422   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1423   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1424   a register is used in an address too soon after it is set.
1425   Compromise by using movab only when it is shorter than the add
1426   or the base register in the address is one of sp, ap, and fp,
1427   which are not modified very often.  */
1428
1429const char *
1430vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
1431{
1432  switch (mode)
1433    {
1434    case E_DImode:
1435      {
1436	rtx low[3];
1437	const char *pattern;
1438	int carry = 1;
1439	bool sub;
1440
1441	if (TARGET_QMATH && 0)
1442	  debug_rtx (insn);
1443
1444	split_quadword_operands (insn, PLUS, operands, low, 3);
1445
1446	if (TARGET_QMATH)
1447	  {
1448	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
1449#ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1450	    gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1451	    gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1452#endif
1453
1454	    /* No reason to add a 0 to the low part and thus no carry, so just
1455	       emit the appropriate add/sub instruction.  */
1456	    if (low[2] == const0_rtx)
1457	      return vax_output_int_add (NULL, operands, SImode);
1458
1459	    /* Are we doing addition or subtraction?  */
1460	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1461
1462	    /* We can't use vax_output_int_add since some the patterns don't
1463	       modify the carry bit.  */
1464	    if (sub)
1465	      {
1466		if (low[2] == constm1_rtx)
1467		  pattern = "decl %0";
1468		else
1469		  pattern = "subl2 $%n2,%0";
1470	      }
1471	    else
1472	      {
1473		if (low[2] == const1_rtx)
1474		  pattern = "incl %0";
1475		else
1476		  pattern = "addl2 %2,%0";
1477	      }
1478	    output_asm_insn (pattern, low);
1479
1480	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
1481	       two 32bit parts, we complement each and then add one to
1482	       low part.  We know that the low part can't overflow since
1483	       it's value can never be 0.  */
1484	    if (sub)
1485		return "sbwc %N2,%0";
1486	    return "adwc %2,%0";
1487	  }
1488
1489	/* Add low parts.  */
1490	if (rtx_equal_p (operands[0], operands[1]))
1491	  {
1492	    if (low[2] == const0_rtx)
1493	/* Should examine operand, punt if not POST_INC.  */
1494	      pattern = "tstl %0", carry = 0;
1495	    else if (low[2] == const1_rtx)
1496	      pattern = "incl %0";
1497	    else
1498	      pattern = "addl2 %2,%0";
1499	  }
1500	else
1501	  {
1502	    if (low[2] == const0_rtx)
1503	      pattern = "movl %1,%0", carry = 0;
1504	    else
1505	      pattern = "addl3 %2,%1,%0";
1506	  }
1507	if (pattern)
1508	  output_asm_insn (pattern, low);
1509	if (!carry)
1510	  /* If CARRY is 0, we don't have any carry value to worry about.  */
1511	  return get_insn_template (CODE_FOR_addsi3, insn);
1512	/* %0 = C + %1 + %2 */
1513	if (!rtx_equal_p (operands[0], operands[1]))
1514	  output_asm_insn ((operands[1] == const0_rtx
1515			    ? "clrl %0"
1516			    : "movl %1,%0"), operands);
1517	return "adwc %2,%0";
1518      }
1519
1520    case E_SImode:
1521      if (rtx_equal_p (operands[0], operands[1]))
1522	{
1523	  if (operands[2] == const1_rtx)
1524	    return "incl %0";
1525	  if (operands[2] == constm1_rtx)
1526	    return "decl %0";
1527	  if (CONST_INT_P (operands[2])
1528	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1529	    return "subl2 $%n2,%0";
1530	  if (CONST_INT_P (operands[2])
1531	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1532	      && REG_P (operands[1])
1533	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1534		   || REGNO (operands[1]) > 11))
1535	    return "movab %c2(%1),%0";
1536	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1537	    return "movab %a2[%0],%0";
1538	  return "addl2 %2,%0";
1539	}
1540
1541      if (rtx_equal_p (operands[0], operands[2]))
1542	{
1543	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1544	    return "movab %a1[%0],%0";
1545	  return "addl2 %1,%0";
1546	}
1547
1548      if (CONST_INT_P (operands[2])
1549	  && INTVAL (operands[2]) < 32767
1550	  && INTVAL (operands[2]) > -32768
1551	  && REG_P (operands[1])
1552	  && push_operand (operands[0], SImode))
1553	return "pushab %c2(%1)";
1554
1555      if (CONST_INT_P (operands[2])
1556	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1557	return "subl3 $%n2,%1,%0";
1558
1559      if (CONST_INT_P (operands[2])
1560	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1561	  && REG_P (operands[1])
1562	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1563	       || REGNO (operands[1]) > 11))
1564	return "movab %c2(%1),%0";
1565
1566      /* Add this if using gcc on a VAX 3xxx:
1567      if (REG_P (operands[1]) && REG_P (operands[2]))
1568	return "movab (%1)[%2],%0";
1569      */
1570
1571      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1572	{
1573	  if (push_operand (operands[0], SImode))
1574	    return "pushab %a2[%1]";
1575	  return "movab %a2[%1],%0";
1576	}
1577
1578      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1579	{
1580	  if (push_operand (operands[0], SImode))
1581	    return "pushab %a1[%2]";
1582	  return "movab %a1[%2],%0";
1583	}
1584
1585      if (flag_pic && REG_P (operands[0])
1586	  && symbolic_operand (operands[2], SImode))
1587	return "movab %a2,%0;addl2 %1,%0";
1588
1589      if (flag_pic
1590	  && (symbolic_operand (operands[1], SImode)
1591	      || symbolic_operand (operands[1], SImode)))
1592	debug_rtx (insn);
1593
1594      return "addl3 %1,%2,%0";
1595
1596    case E_HImode:
1597      if (rtx_equal_p (operands[0], operands[1]))
1598	{
1599	  if (operands[2] == const1_rtx)
1600	    return "incw %0";
1601	  if (operands[2] == constm1_rtx)
1602	    return "decw %0";
1603	  if (CONST_INT_P (operands[2])
1604	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1605	    return "subw2 $%n2,%0";
1606	  return "addw2 %2,%0";
1607	}
1608      if (rtx_equal_p (operands[0], operands[2]))
1609	return "addw2 %1,%0";
1610      if (CONST_INT_P (operands[2])
1611	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1612	return "subw3 $%n2,%1,%0";
1613      return "addw3 %1,%2,%0";
1614
1615    case E_QImode:
1616      if (rtx_equal_p (operands[0], operands[1]))
1617	{
1618	  if (operands[2] == const1_rtx)
1619	    return "incb %0";
1620	  if (operands[2] == constm1_rtx)
1621	    return "decb %0";
1622	  if (CONST_INT_P (operands[2])
1623	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1624	    return "subb2 $%n2,%0";
1625	  return "addb2 %2,%0";
1626	}
1627      if (rtx_equal_p (operands[0], operands[2]))
1628	return "addb2 %1,%0";
1629      if (CONST_INT_P (operands[2])
1630	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1631	return "subb3 $%n2,%1,%0";
1632      return "addb3 %1,%2,%0";
1633
1634    default:
1635      gcc_unreachable ();
1636    }
1637}
1638
/* Return the assembler template for a DImode subtract
   operands[0] = operands[1] - operands[2], emitting the low-word
   instruction directly and returning the template for the high word.
   Only DImode is handled; other modes abort.  */

const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */

		/* If the source *or* the destination operands are
		   indirect memory references with post-increment
		   addressing, a memory reference using the base
		   register plus an offset must be constructed to
		   address the high word of the source or result.

		   Pre-decrement memory references are rejected by the
		   illegal_addsub_di_memory_operand predicate.  */

		rtx earlyhiw[3];

		/* high word - destination */
		if (MEM_P (operands[0])
		    && GET_CODE (XEXP (operands[0], 0)) == POST_INC)
		  {
		    /* NOTE(review): this local shadows the MODE parameter.  */
		    const enum machine_mode mode = GET_MODE (operands[0]);
		    rtx x = XEXP (XEXP (operands[0], 0), 0);
		    x = plus_constant (Pmode, x, GET_MODE_SIZE (mode));
		    x = gen_rtx_MEM (mode, x);
		    earlyhiw[0] = x;
		  }
		else
		  earlyhiw[0] = operands[0];

		earlyhiw[1] = operands[1]; /* easy, this is const0_rtx */

		/* high word - source */
		if (MEM_P (operands[2])
		    && GET_CODE (XEXP (operands[2], 0)) == POST_INC)
		  {
		    const enum machine_mode mode = GET_MODE (operands[2]);
		    rtx x = XEXP (XEXP (operands[2], 0), 0);
		    x = plus_constant (Pmode, x, GET_MODE_SIZE (mode));
		    x = gen_rtx_MEM (mode, x);
		    earlyhiw[2] = x;
		  }
		else
		  earlyhiw[2] = operands[2];

		/* Negate the high word first, then the low word.  */
		output_asm_insn ("mnegl %2,%0", earlyhiw);
		output_asm_insn ("mnegl %2,%0", low);

		if (earlyhiw[2] != operands[2])
		  {
		    rtx ops[3];
		    const enum machine_mode mode = GET_MODE (operands[2]);

		    output_asm_insn ("sbwc $0,%0", operands);
		    /* update the source operand's base register to
		       point to the following word */
		    ops[0] = XEXP (XEXP (operands[2], 0), 0);
		    ops[1] = const0_rtx;
		    ops[2] = gen_int_mode (GET_MODE_SIZE (mode), SImode);
		    output_asm_insn ("addl2 %2,%0", ops);
		    return "";
		  }
		else
		  return "sbwc $0,%0";
	      }
	    /* General QMATH case: two-operand form only.  */
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}
1767
/* Build a MEM rtx of mode MODE addressing BASE plus OFF, using
   addressing code CODE (POST_INC, REG, or anything else for a
   displaced reference).  CONST wrappers and nested PLUS forms of
   BASE are folded into OFF first.  Helper for vax_output_movmemsi.  */

static rtx
mkrtx(enum rtx_code code, enum machine_mode mode, rtx base, HOST_WIDE_INT off)
{
  rtx tmp;

  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);

  if (GET_CODE (base) == PLUS)
    {
      rtx a = XEXP (base, 0);
      rtx b = XEXP (base, 1);
      if (GET_CODE (b) == CONST)
	b = XEXP (b, 0);
      if (CONST_INT_P (b))
	{
          off += INTVAL (b);
          base = a;
	}
      else if (REG_P (a) && GET_CODE (b) == SYMBOL_REF)
	{
	  /* Fold the running offset into the symbolic part.  */
	  if (off != 0)
	    {
	      base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, b, off));
	      off = 0;
	    }
	}
      else if (REG_P (a) && GET_CODE (b) == PLUS)
	{
	  /* NOTE(review): assumes XEXP (b, 1) is a CONST_INT — confirm
	     against the addresses movmemsi can actually see.  */
          off += INTVAL (XEXP (b, 1));
	  base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, XEXP (b, 0), off));
	  off = 0;
	}
      else
        {
	  debug_rtx(base);
	  gcc_unreachable ();
	}
    }
  if (code == POST_INC)
    tmp = gen_rtx_POST_INC (SImode, base);
  else if (off == 0 || (REG_P (base) && code == REG))
    tmp = base;
  else
    tmp = plus_constant (Pmode, base, off);
  return gen_rtx_MEM (mode, tmp);
}
1815
/* Emit a block copy of INTVAL (operands[2]) bytes from operands[1] to
   operands[0] as a sequence of movq/movl/movw/movb instructions,
   widest pieces first.  Post-increment addressing is used for a
   source/destination whose address register dies in INSN (per
   REG_DEAD notes); otherwise explicit displacements are used.
   Returns the template for the final move; earlier moves are emitted
   directly via output_asm_insn.  */

const char *
vax_output_movmemsi (rtx insn, rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[2]);
  HOST_WIDE_INT off;
  rtx src, dest;
  const char *pat = NULL;
  const enum rtx_code *src_codes;
  const enum rtx_code *dest_codes;
  int code_idx = 0;
  int mode_idx;

  /* Piece sizes, tried from largest (DImode, index 3) down.  */
  static const enum machine_mode xmodes[4] =
    {
      QImode, HImode, SImode, DImode
    };
  static const char * const pats[4] =
    {
      "movb %1,%0", "movw %1,%0", "movl %1,%0", "movq %1,%0",
    };
  /* Addressing-code triples indexed by [reg-dies][first/middle/last]:
     displacement-only when the base register survives, post-increment
     (ending in a plain register reference) when it dies.  */
  static const enum rtx_code codes[2][3] =
    {
      { PLUS, PLUS, PLUS },
      { POST_INC, POST_INC, REG },
    };

  src = XEXP (operands[1], 0);

  src_codes =
    codes[REG_P (src) && find_regno_note (insn, REG_DEAD, REGNO(src))];

  dest = XEXP (operands[0], 0);

  dest_codes =
    codes[REG_P (dest) && find_regno_note (insn, REG_DEAD, REGNO(dest))];

  for (off = 0, code_idx = 0, mode_idx = 3; mode_idx >= 0; mode_idx--)
    {
      const enum machine_mode mode = xmodes[mode_idx];
      const HOST_WIDE_INT mode_len = GET_MODE_SIZE (mode);
      for (; n >= mode_len; n -= mode_len, off += mode_len)
	{
	  /* Emit the previously prepared move; the very last one is
	     returned instead of emitted.  */
	  if (pat != NULL)
	    output_asm_insn (pat, operands);
	  if (n == mode_len)
	    code_idx = 2;
	  operands[0] = mkrtx(dest_codes[code_idx], mode, dest, off);
	  operands[1] = mkrtx(src_codes[code_idx], mode, src, off);
	  if (pat == NULL)
	    code_idx = 1;
	  pat = pats[mode_idx];
	}
    }

  return pat;
}
1872
1873/* True if X is an rtx for a constant that is a valid address.  */
1874
1875bool
1876legitimate_constant_address_p (rtx x)
1877{
1878  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1879	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1880    return true;
1881  if (GET_CODE (x) != CONST)
1882    return false;
1883#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1884  if (flag_pic
1885      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1886      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1887    return false;
1888#endif
1889   gcc_assert (! REG_P (x));
1890   return true;
1891}
1892
1893bool
1894legitimate_pic_operand_p (rtx x)
1895{
1896#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1897  if (GET_CODE (x) != CONST)
1898    return true;
1899  if (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1900      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1901    return false;
1902#endif
1903  return true;
1904}
1905
1906/* The other macros defined here are used only in legitimate_address_p ().  */
1907
1908/* Nonzero if X is a hard reg that can be used as an index
1909   or, if not strict, if it is a pseudo reg.  */
1910#define	INDEX_REGISTER_P(X, STRICT) \
1911(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1912
1913/* Nonzero if X is a hard reg that can be used as a base reg
1914   or, if not strict, if it is a pseudo reg.  */
1915#define	BASE_REGISTER_P(X, STRICT) \
1916(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1917
1918#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1919
1920/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1921   are no SYMBOL_REFs for external symbols present.  */
1922
static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  /* A plain symbol is fine unless it is non-local under PIC and the
     reference would be indirected.  */
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  /* A CONST wrapper is fine unless it wraps a non-local symbol
     under PIC.  */
  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}
1936
1937#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1938
/* Without NO_EXTERNAL_INDIRECT_ADDRESS there is no external-symbol
   restriction; any constant address will do.  */
static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}
1944
1945#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1946
1947/* True if X is an address which can be indirected.  External symbols
1948   could be in a sharable image library, so we disallow those.  */
1949
1950static bool
1951indirectable_address_p (rtx x, bool strict, bool indirect)
1952{
1953  if (indirectable_constant_address_p (x, indirect)
1954      || BASE_REGISTER_P (x, strict))
1955    return true;
1956  if (GET_CODE (x) != PLUS
1957      || !BASE_REGISTER_P (XEXP (x, 0), strict)
1958      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1959    return false;
1960  return indirectable_constant_address_p (XEXP (x, 1), indirect);
1961}
1962
1963/* Return true if x is a valid address not using indexing.
1964   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      /* During reload a pseudo may stand for its memory equivalent;
	 accept the register only if that equivalent address (if any)
	 is itself a valid indirectable address.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* NOTE(review): XEXP is applied before X's code is known; this
     relies on every rtx reaching here having an operand 0 slot.  */
  xfoo0 = XEXP (x, 0);
  /* Indirect reference: (MEM addr) where addr is itself valid.  */
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  /* Auto-decrement/increment on a base register.  */
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1988
1989/* True if PROD is either a reg times size of mode MODE and MODE is less
1990   than or equal 8 bytes, or just a reg if MODE is one byte.  */
1991
1992static bool
1993index_term_p (rtx prod, machine_mode mode, bool strict)
1994{
1995  rtx xfoo0, xfoo1;
1996
1997  if (GET_MODE_SIZE (mode) == 1)
1998    return BASE_REGISTER_P (prod, strict);
1999
2000  if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
2001    return false;
2002
2003  xfoo0 = XEXP (prod, 0);
2004  xfoo1 = XEXP (prod, 1);
2005
2006  if (CONST_INT_P (xfoo0)
2007      && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
2008      && INDEX_REGISTER_P (xfoo1, strict))
2009    return true;
2010
2011  if (CONST_INT_P (xfoo1)
2012      && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
2013      && INDEX_REGISTER_P (xfoo0, strict))
2014    return true;
2015
2016  return false;
2017}
2018
2019/* Return true if X is the sum of a register
2020   and a valid index term for mode MODE.  */
2021static bool
2022reg_plus_index_p (rtx x, machine_mode mode, bool strict)
2023{
2024  rtx xfoo0, xfoo1;
2025
2026  if (GET_CODE (x) != PLUS)
2027    return false;
2028
2029  xfoo0 = XEXP (x, 0);
2030  xfoo1 = XEXP (x, 1);
2031
2032  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
2033    return true;
2034
2035  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
2036    return true;
2037
2038  return false;
2039}
2040
2041/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
2042static bool
2043indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
2044{
2045  if (!CONSTANT_ADDRESS_P (xfoo0))
2046    return false;
2047  if (BASE_REGISTER_P (xfoo1, strict))
2048    return !flag_pic || mode == QImode;
2049  if (flag_pic && symbolic_operand (xfoo0, SImode))
2050    return false;
2051  return reg_plus_index_p (xfoo1, mode, strict);
2052}
2053
2054/* legitimate_address_p returns true if it recognizes an RTL expression "x"
2055   that is a valid memory address for an instruction.
2056   The MODE argument is the machine mode for the MEM expression
2057   that wants to use this address.  */
2058bool
2059vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2060{
2061  rtx xfoo0, xfoo1;
2062
2063  if (nonindexed_address_p (x, strict))
2064    return true;
2065
2066  if (GET_CODE (x) != PLUS)
2067    return false;
2068
2069  /* Handle <address>[index] represented with index-sum outermost */
2070
2071  xfoo0 = XEXP (x, 0);
2072  xfoo1 = XEXP (x, 1);
2073
2074  if (index_term_p (xfoo0, mode, strict)
2075      && nonindexed_address_p (xfoo1, strict))
2076    return true;
2077
2078  if (index_term_p (xfoo1, mode, strict)
2079      && nonindexed_address_p (xfoo0, strict))
2080    return true;
2081
2082  /* Handle offset(reg)[index] with offset added outermost */
2083
2084  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
2085      || indexable_address_p (xfoo1, xfoo0, mode, strict))
2086    return true;
2087
2088  return false;
2089}
2090
2091/* Return true if x (a legitimate address expression) has an effect that
2092   depends on the machine mode it is used for.  On the VAX, the predecrement
2093   and postincrement address depend thus (the amount of decrement or
2094   increment being the length of the operand) and all indexed address depend
2095   thus (because the index scale factor is the length of the operand).  */
2096
2097static bool
2098vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
2099{
2100  rtx xfoo0, xfoo1;
2101
2102  /* Auto-increment cases are now dealt with generically in recog.c.  */
2103  if (GET_CODE (x) != PLUS)
2104    return false;
2105
2106  xfoo0 = XEXP (x, 0);
2107  xfoo1 = XEXP (x, 1);
2108
2109  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
2110    return false;
2111  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
2112    return false;
2113  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
2114    return false;
2115  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
2116    return false;
2117
2118  return true;
2119}
2120
/* Recursively decompose address expression ADDR so it can be computed
   in pieces: CONST wrappers are stripped, and each PLUS/MULT has its
   left operand materialized into a fresh pseudo (via emit_move_insn)
   before being combined with the decomposed right operand.  Any other
   rtx is returned unchanged.  Helper for fixup_mathdi_operand.  */

static rtx
decompose_address_operand(rtx addr)
{
  enum rtx_code code = GET_CODE (addr);

  switch (code)
    {
    case CONST:
      return decompose_address_operand (XEXP (addr, 0));
    case PLUS:
    case MULT:
      {
	rtx op0, op1;
	rtx temp;
	/* Generate a temporary register, assign the result of
	   decomposing op0 to it, then combine it (PLUS or MULT)
	   with the result of decomposing op1, and return that
	   combined expression.  */
	temp = gen_reg_rtx (Pmode);
	op0 = decompose_address_operand (XEXP (addr, 0));
	op1 = decompose_address_operand (XEXP (addr, 1));

	emit_move_insn (temp, op0);

	if (code == PLUS)
	  temp = gen_rtx_PLUS (Pmode, temp, op1);
	else if (code == MULT)
	  temp = gen_rtx_MULT (Pmode, temp, op1);

	return temp;
      }
      break;
    default:
      break;
    }
  return addr;
}
2160
/* If X is a memory operand that add/sub DI patterns cannot accept
   (per illegal_addsub_di_memory_operand), rebuild it as a DImode MEM
   whose address has been computed into a fresh pseudo; otherwise
   return X unchanged.  Under PIC, a CONST (sym + offset) address is
   split so only the symbol is materialized and the offset is re-added
   symbolically.  */

static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, decompose_address_operand (addr));
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
2183
2184void
2185vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
2186{
2187  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
2188  rtx temp;
2189
2190  rtx (*gen_old_insn)(rtx, rtx, rtx);
2191  rtx (*gen_si_insn)(rtx, rtx, rtx);
2192  rtx (*gen_insn)(rtx, rtx, rtx);
2193
2194  if (code == PLUS)
2195    {
2196      gen_old_insn = gen_adddi3_old;
2197      gen_si_insn = gen_addsi3;
2198      gen_insn = gen_adcdi3;
2199    }
2200  else if (code == MINUS)
2201    {
2202      gen_old_insn = gen_subdi3_old;
2203      gen_si_insn = gen_subsi3;
2204      gen_insn = gen_sbcdi3;
2205    }
2206  else
2207    gcc_unreachable ();
2208
2209  /* If this is addition (thus operands are commutative) and if there is one
2210     addend that duplicates the desination, we want that addend to be the
2211     first addend.  */
2212  if (code == PLUS
2213      && rtx_equal_p (operands[0], operands[2])
2214      && !rtx_equal_p (operands[1], operands[2]))
2215    {
2216      temp = operands[2];
2217      operands[2] = operands[1];
2218      operands[1] = temp;
2219    }
2220
2221  if (!TARGET_QMATH)
2222    {
2223      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2224    }
2225  else if (hi_only)
2226    {
2227      if (!rtx_equal_p (operands[0], operands[1])
2228	  && (REG_P (operands[0]) && MEM_P (operands[1])))
2229	{
2230	  emit_move_insn (operands[0], operands[1]);
2231	  operands[1] = operands[0];
2232	}
2233
2234      operands[0] = fixup_mathdi_operand (operands[0], DImode);
2235      operands[1] = fixup_mathdi_operand (operands[1], DImode);
2236      operands[2] = fixup_mathdi_operand (operands[2], DImode);
2237
2238      if (!rtx_equal_p (operands[0], operands[1]))
2239	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2240			  operand_subword (operands[1], 0, 0, DImode));
2241
2242      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2243				 operand_subword (operands[1], 1, 0, DImode),
2244				 operand_subword (operands[2], 1, 0, DImode)));
2245    }
2246  else
2247    {
2248      /* If are adding the same value together, that's really a multiply by 2,
2249	 and that's just a left shift of 1.  */
2250      if (rtx_equal_p (operands[1], operands[2]))
2251	{
2252	  if (code == MINUS)
2253	    emit_insn (gen_movdi (operands[0], const0_rtx));
2254	  else
2255	    emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2256	  return;
2257	}
2258
2259      operands[0] = fixup_mathdi_operand (operands[0], DImode);
2260
2261      /* If an operand is the same as operand[0], use the operand[0] rtx
2262	 because fixup will an equivalent rtx but not an equal one. */
2263
2264      if (rtx_equal_p (operands[0], operands[1]))
2265	operands[1] = operands[0];
2266      else
2267	operands[1] = fixup_mathdi_operand (operands[1], DImode);
2268
2269      if (rtx_equal_p (operands[0], operands[2]))
2270	operands[2] = operands[0];
2271      else
2272	operands[2] = fixup_mathdi_operand (operands[2], DImode);
2273
2274      /* If we are subtracting not from ourselves [d = a - b], and because the
2275	 carry ops are two operand only, we would need to do a move prior to
2276	 the subtract.  And if d == b, we would need a temp otherwise
2277	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
2278	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
2279	 no temp is needed.
2280
2281	 If we are doing addition, since the carry ops are two operand, if
2282	 we aren't adding to ourselves, move the first addend to the
2283	 destination first.  */
2284
2285      gcc_assert (operands[1] != const0_rtx || code == MINUS);
2286      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2287	{
2288	  if (code == MINUS && CONSTANT_P (operands[1]))
2289	    {
2290	      temp = gen_reg_rtx (DImode);
2291	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2292	      code = PLUS;
2293	      gen_insn = gen_adcdi3;
2294	      operands[2] = operands[1];
2295	      operands[1] = operands[0];
2296	    }
2297	  else
2298	    emit_move_insn (operands[0], operands[1]);
2299	}
2300
2301      /* Subtracting a constant will have been rewritten to an addition of the
2302	 negative of that constant before we get here.  */
2303      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2304      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2305    }
2306}
2307
2308bool
2309adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2310{
2311  HOST_WIDE_INT lo_offset;
2312  HOST_WIDE_INT hi_offset;
2313
2314  if (GET_CODE (lo) != GET_CODE (hi))
2315    return false;
2316
2317  if (REG_P (lo))
2318    return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2319  if (CONST_INT_P (lo))
2320    return INTVAL (hi) == 0 && UINTVAL (lo) < 64;
2321  if (CONST_INT_P (lo))
2322    return mode != SImode;
2323
2324  if (!MEM_P (lo))
2325    return false;
2326
2327  if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2328    return false;
2329
2330  lo = XEXP (lo, 0);
2331  hi = XEXP (hi, 0);
2332
2333  if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2334    return rtx_equal_p (lo, hi);
2335
2336  switch (GET_CODE (lo))
2337    {
2338    case REG:
2339    case SYMBOL_REF:
2340      lo_offset = 0;
2341      break;
2342    case CONST:
2343      lo = XEXP (lo, 0);
2344      /* FALLTHROUGH */
2345    case PLUS:
2346      if (!CONST_INT_P (XEXP (lo, 1)))
2347	return false;
2348      lo_offset = INTVAL (XEXP (lo, 1));
2349      lo = XEXP (lo, 0);
2350      break;
2351    default:
2352      return false;
2353    }
2354
2355  switch (GET_CODE (hi))
2356    {
2357    case REG:
2358    case SYMBOL_REF:
2359      hi_offset = 0;
2360      break;
2361    case CONST:
2362      hi = XEXP (hi, 0);
2363      /* FALLTHROUGH */
2364    case PLUS:
2365      if (!CONST_INT_P (XEXP (hi, 1)))
2366	return false;
2367      hi_offset = INTVAL (XEXP (hi, 1));
2368      hi = XEXP (hi, 0);
2369      break;
2370    default:
2371      return false;
2372    }
2373
2374  if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2375    return false;
2376
2377  return rtx_equal_p (lo, hi)
2378	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2379}
2380
2381/* Output assembler code for a block containing the constant parts
2382   of a trampoline, leaving space for the variable parts.  */
2383
2384/* On the VAX, the trampoline contains an entry mask and two instructions:
2385     .word NN
2386     movl $STATIC,r0   (store the functions static chain)
2387     jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
2388
static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Entry mask word (filled in from the target function at init time).  */
  assemble_aligned_integer (2, const0_rtx);
  /* "movl $STATIC,reg": emitted little-endian as 0xd0 (movl opcode)
     followed by 0x8f (immediate addressing mode for the source).  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* Placeholder longword for the static chain value (patched later).  */
  assemble_aligned_integer (4, const0_rtx);
  /* Destination operand byte: register mode (0x5n) selecting the
     static chain register.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* "jmp *$FUNCTION": 0x17 (jmp opcode) followed by 0x9f (absolute
     addressing mode).  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* Placeholder longword for the function address (patched later).  */
  assemble_aligned_integer (4, const0_rtx);
}
2399
2400/* We copy the register-mask from the function's pure code
2401   to the start of the trampoline.  */
2402
static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Start from the constant template emitted by
     vax_asm_trampoline_template.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Copy the target function's register/entry mask (its first two
     bytes) into the trampoline's entry-mask slot at offset 0.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Store the static chain value as the movl immediate (offset 4).  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Store the function's code address, skipping its 2-byte entry mask,
     as the jmp absolute operand (offset 11).  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* Synchronize the instruction stream so the freshly written code is
     visible for execution.  */
  emit_insn (gen_sync_istream ());
}
2421
2422/* Value is the number of bytes of arguments automatically
2423   popped when returning from a subroutine call.
2424   FUNDECL is the declaration node of the function (as a tree),
2425   FUNTYPE is the data type of the function (as a tree),
2426   or for a library call it is an identifier node for the subroutine name.
2427   SIZE is the number of bytes of arguments passed on the stack.
2428
2429   On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2430
2431static poly_int64
2432vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2433		      tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
2434{
2435  return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
2436}
2437
2438/* Implement TARGET_FUNCTION_ARG.  On the VAX all args are pushed.  */
2439
static rtx
vax_function_arg (cumulative_args_t, const function_arg_info &)
{
  /* No argument is ever passed in a register; NULL_RTX means "push on
     the stack" to the middle end.  */
  return NULL_RTX;
}
2445
2446/* Update the data in CUM to advance over argument ARG.  */
2447
static void
vax_function_arg_advance (cumulative_args_t cum_v,
			  const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* Each argument occupies a whole number of longwords: round its size
     up to the next multiple of 4 bytes.  */
  *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
}
2456
2457static HOST_WIDE_INT
2458vax_starting_frame_offset (void)
2459{
2460  /* On ELF targets, reserve the top of the stack for exception handler
2461     stackadj value.  */
2462  return TARGET_ELF ? -4 : 0;
2463}
2464
2465bool
2466vax_decomposed_dimode_operand_p (rtx lo, rtx hi)
2467{
2468  HOST_WIDE_INT lo_offset = 0;
2469  HOST_WIDE_INT hi_offset = 0;
2470
2471  /* If the codes aren't the same, can't be a DImode operand.  */
2472  if (GET_CODE (lo) != GET_CODE (hi))
2473    return false;
2474
2475  /* If a register, hi regno must be one more than the lo regno.  */
2476  if (REG_P (lo))
2477    return REGNO (lo) + 1 == REGNO (hi);
2478
2479  /* If not memory, can't be a DImode operand.  */
2480  if (!MEM_P (lo))
2481    return false;
2482
2483  /* Get addresses of memory operands.  */
2484  lo = XEXP(lo, 0);
2485  hi = XEXP(hi, 0);
2486
2487  /* If POST_INC, regno must match.  */
2488  if (GET_CODE (lo) == POST_INC && GET_CODE (hi) == POST_INC)
2489    return REGNO (XEXP (lo, 0)) == REGNO (XEXP (hi, 0));
2490
2491  if (GET_CODE (lo) == PLUS)
2492    {
2493      /* If PLUS or MULT, this must an indexed address so fail.  */
2494      if (GET_CODE (XEXP (lo, 0)) == PLUS
2495	  || GET_CODE (XEXP (lo, 0)) == MULT
2496	  || !CONST_INT_P (XEXP (lo, 1)))
2497	return false;
2498      lo_offset = INTVAL (XEXP (lo, 1));
2499      lo = XEXP(lo, 0);
2500    }
2501
2502  if (GET_CODE (hi) == PLUS)
2503    {
2504      /* If PLUS or MULT, this must an indexed address so fail.  */
2505      if (GET_CODE (XEXP (hi, 0)) == PLUS
2506	  || GET_CODE (XEXP (hi, 0)) == MULT
2507	  || !CONST_INT_P (XEXP (hi, 1)))
2508	return false;
2509      hi_offset = INTVAL (XEXP (hi, 1));
2510      hi = XEXP(hi, 0);
2511    }
2512
2513  return rtx_equal_p(lo, hi) && lo_offset + 4 == hi_offset;
2514}
2515
2516/* Return 1 if a bitfield instruction (extv/extzv) may trap */
2517static int
2518vax_bitfield_may_trap_p (const_rtx x, unsigned flags)
2519{
2520  /* per the VARM
2521   * Bitfield instructions may trap if
2522   * size (arg1) GTRU 32
2523   * size (arg1) NEQ 0, pos (arg 2) GTRU 31 and the field is in a register
2524   * i.e. REG_P(operands[0]) is true
2525   *
2526   * GCC can only determine that a bitfield instruction will not trap
2527   * if the size and position arguments are constants; if they aren't,
2528   * the instruction must be assumed to trap.
2529   */
2530  rtx field = XEXP (x, 0);
2531  rtx size = XEXP (x, 1);
2532  rtx pos = XEXP (x, 2);
2533  int retval = 0;
2534
2535  if (!CONST_INT_P (size) || !CONST_INT_P (pos))
2536    retval = 1;
2537  else if (INTVAL (size) < 0 || INTVAL (size) > GET_MODE_BITSIZE ( SImode ))
2538    retval = 1;
2539  else if (REG_P (field) && INTVAL (size) != 0
2540	   && (INTVAL (pos) < 0 || INTVAL (pos) >= GET_MODE_BITSIZE ( SImode )))
2541    retval = 1;
2542  else
2543    retval = 0;
2544  return retval;
2545}
2546