/* rs6000.c revision 110611 */
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING.  If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE: these evaluate their arguments more than once, so do not pass
   expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double: the -mlong-double- switch text and the parsed
   size in bits (64 or 128; see rs6000_override_options).  */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX;
   rs6000_override_options clears flag_pic after saving it here.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Flag to say the TOC is initialized */
int toc_initialized;
/* Buffer for the internal label naming the TOC; filled in by
   rs6000_override_options via ASM_GENERATE_INTERNAL_LABEL.  */
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;
123
/* Forward declarations for functions defined later in this file.  */
static void rs6000_add_gc_roots PARAMS ((void));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int toc_hash_mark_entry PARAMS ((void **, void *));
static void toc_hash_mark_table PARAMS ((void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static void rs6000_free_machine_status PARAMS ((struct function *));
static void rs6000_init_machine_status PARAMS ((struct function *));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
  HOST_WIDE_INT, HOST_WIDE_INT));
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
#endif
#ifdef OBJECT_FORMAT_COFF
static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
#endif
static bool rs6000_binds_local_p PARAMS ((tree));
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

static void rs6000_init_builtins PARAMS ((void));
static void altivec_init_builtins PARAMS ((void));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static rtx altivec_expand_builtin PARAMS ((tree, rtx));
static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
static void rs6000_parse_abi_options PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
int vrsave_operation PARAMS ((rtx, enum machine_mode));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
static int easy_vector_constant PARAMS ((rtx));
181
/* Default register names, indexed by hard register number.  The layout
   mirrors alt_reg_names below: GPRs, FPRs, special registers, CR
   fields, XER, then the AltiVec registers.  */
char rs6000_reg_names[][8] =
{
  /* 32 general-purpose registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* 32 floating-point registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
  /* Condition register fields 0-7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
203
#ifdef TARGET_REGNAMES
/* Alternate (%-prefixed) register names, same layout as
   rs6000_reg_names above; copied over it when -mregnames is given
   (see rs6000_override_options).  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9",  "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
   "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
   "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
   "vrsave"
};
#endif
226
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#ifdef OBJECT_FORMAT_COFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#if TARGET_ELF
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS  rs6000_elf_section_type_flags
#endif

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The single instance of the target hook vector, built from the
   TARGET_* macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
291
292/* Override command line options.  Mostly we process the processor
293   type and sometimes adjust other TARGET_ options.  */
294
void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping canonical cpu names to the processor enum plus the
     target flag bits each cpu turns on and off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  Entry 0 is the configure-time
     default; -mcpu= and -mtune= (entries 1 and 2) override it.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* The inner loop ran off the end: the name was not found.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* On AIX, -fpic is a no-op; remember the request but clear the flag
     so the rest of the compiler sees non-PIC.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Set size of long double: default 64 bits, -mlong-double-128 may
     select 128.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
  free_machine_status = rs6000_free_machine_status;
}
585
586/* Handle -mabi= options.  */
587static void
588rs6000_parse_abi_options ()
589{
590  if (rs6000_abi_string == 0)
591    return;
592  else if (! strcmp (rs6000_abi_string, "altivec"))
593    rs6000_altivec_abi = 1;
594  else if (! strcmp (rs6000_abi_string, "no-altivec"))
595    rs6000_altivec_abi = 0;
596  else
597    error ("unknown ABI specified: '%s'", rs6000_abi_string);
598}
599
/* Hook called for per-optimization-level option adjustments.
   Intentionally empty: rs6000 makes no level-dependent changes.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
606
/* Do anything needed at the start of the asm file.  Under
   -fverbose-asm this emits an assembler comment describing the
   cpu/tune selections (and, on ELF, the small-data settings).  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START points at the comment lead-in for the first item only;
	 it is reset to "" once anything has been printed.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the comment line only if something was printed.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
654
655
/* Create a CONST_DOUBLE from a string.  STRING is parsed as a
   floating constant of machine mode MODE and wrapped in an immediate
   real RTX.  */

struct rtx_def *
rs6000_float_const (string, mode)
     const char *string;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE value;
  value = REAL_VALUE_ATOF (string, mode);
  return immed_real_const_1 (value, mode);
}
667
668/* Return non-zero if this function is known to have a null epilogue.  */
669
670int
671direct_return ()
672{
673  if (reload_completed)
674    {
675      rs6000_stack_t *info = rs6000_stack_info ();
676
677      if (info->first_gp_reg_save == 32
678	  && info->first_fp_reg_save == 64
679	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680	  && ! info->lr_save_p
681	  && ! info->cr_save_p
682	  && info->vrsave_mask == 0
683	  && ! info->push_p)
684	return 1;
685    }
686
687  return 0;
688}
689
/* Returns 1 always.  A predicate that accepts any operand at all;
   both arguments are ignored.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
699
700/* Returns 1 if op is the count register.  */
701int
702count_register_operand (op, mode)
703     rtx op;
704     enum machine_mode mode ATTRIBUTE_UNUSED;
705{
706  if (GET_CODE (op) != REG)
707    return 0;
708
709  if (REGNO (op) == COUNT_REGISTER_REGNUM)
710    return 1;
711
712  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
713    return 1;
714
715  return 0;
716}
717
718/* Returns 1 if op is an altivec register.  */
719int
720altivec_register_operand (op, mode)
721     rtx op;
722     enum machine_mode mode ATTRIBUTE_UNUSED;
723{
724
725  return (register_operand (op, mode)
726	  && (GET_CODE (op) != REG
727	      || REGNO (op) > FIRST_PSEUDO_REGISTER
728	      || ALTIVEC_REGNO_P (REGNO (op))));
729}
730
731int
732xer_operand (op, mode)
733     rtx op;
734     enum machine_mode mode ATTRIBUTE_UNUSED;
735{
736  if (GET_CODE (op) != REG)
737    return 0;
738
739  if (XER_REGNO_P (REGNO (op)))
740    return 1;
741
742  return 0;
743}
744
745/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
746   by such constants completes more quickly.  */
747
748int
749s8bit_cint_operand (op, mode)
750     rtx op;
751     enum machine_mode mode ATTRIBUTE_UNUSED;
752{
753  return ( GET_CODE (op) == CONST_INT
754	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755}
756
757/* Return 1 if OP is a constant that can fit in a D field.  */
758
759int
760short_cint_operand (op, mode)
761     rtx op;
762     enum machine_mode mode ATTRIBUTE_UNUSED;
763{
764  return (GET_CODE (op) == CONST_INT
765	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766}
767
768/* Similar for an unsigned D field.  */
769
770int
771u_short_cint_operand (op, mode)
772     rtx op;
773     enum machine_mode mode ATTRIBUTE_UNUSED;
774{
775  return (GET_CODE (op) == CONST_INT
776	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777}
778
779/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
780
781int
782non_short_cint_operand (op, mode)
783     rtx op;
784     enum machine_mode mode ATTRIBUTE_UNUSED;
785{
786  return (GET_CODE (op) == CONST_INT
787	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788}
789
790/* Returns 1 if OP is a CONST_INT that is a positive value
791   and an exact power of 2.  */
792
793int
794exact_log2_cint_operand (op, mode)
795     rtx op;
796     enum machine_mode mode ATTRIBUTE_UNUSED;
797{
798  return (GET_CODE (op) == CONST_INT
799	  && INTVAL (op) > 0
800	  && exact_log2 (INTVAL (op)) >= 0);
801}
802
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept non-REG operands that register_operand allows, hard
     registers numbered below MQ_REGNO, and registers at or above
     ARG_POINTER_REGNUM (which covers pseudos) except XER.  The
     special registers presumably all lie in [MQ_REGNO,
     ARG_POINTER_REGNUM) — NOTE(review): layout assumed from the
     regno macros, confirm against the header defining them.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
817
818/* Returns 1 if OP is either a pseudo-register or a register denoting a
819   CR field.  */
820
821int
822cc_reg_operand (op, mode)
823     rtx op;
824     enum machine_mode mode;
825{
826  return (register_operand (op, mode)
827	  && (GET_CODE (op) != REG
828	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
829	      || CR_REGNO_P (REGNO (op))));
830}
831
832/* Returns 1 if OP is either a pseudo-register or a register denoting a
833   CR field that isn't CR0.  */
834
835int
836cc_reg_not_cr0_operand (op, mode)
837     rtx op;
838     enum machine_mode mode;
839{
840  return (register_operand (op, mode)
841	  && (GET_CODE (op) != REG
842	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
843	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
844}
845
846/* Returns 1 if OP is either a constant integer valid for a D-field or
847   a non-special register.  If a register, it must be in the proper
848   mode unless MODE is VOIDmode.  */
849
850int
851reg_or_short_operand (op, mode)
852      rtx op;
853      enum machine_mode mode;
854{
855  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856}
857
858/* Similar, except check if the negation of the constant would be
859   valid for a D-field.  */
860
861int
862reg_or_neg_short_operand (op, mode)
863      rtx op;
864      enum machine_mode mode;
865{
866  if (GET_CODE (op) == CONST_INT)
867    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
868
869  return gpc_reg_operand (op, mode);
870}
871
872/* Returns 1 if OP is either a constant integer valid for a DS-field or
873   a non-special register.  If a register, it must be in the proper
874   mode unless MODE is VOIDmode.  */
875
876int
877reg_or_aligned_short_operand (op, mode)
878      rtx op;
879      enum machine_mode mode;
880{
881  if (gpc_reg_operand (op, mode))
882    return 1;
883  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
884    return 1;
885
886  return 0;
887}
888
889
890/* Return 1 if the operand is either a register or an integer whose
891   high-order 16 bits are zero.  */
892
893int
894reg_or_u_short_operand (op, mode)
895     rtx op;
896     enum machine_mode mode;
897{
898  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899}
900
901/* Return 1 is the operand is either a non-special register or ANY
902   constant integer.  */
903
904int
905reg_or_cint_operand (op, mode)
906    rtx op;
907    enum machine_mode mode;
908{
909  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910}
911
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a wider host, require the value to fit in 32
		 signed bits; on a 32-bit host any CONST_INT does
		 trivially.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
928
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* Accepted range is [-0x80008000, 0x7fff7fff]; the
		 upper bound is asymmetric so the constant can be
		 split into a sign-extended high and low 16-bit part
		 without overflow.  */
	      && INTVAL (op) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
946
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  /* Same range test as reg_or_add_cint64_operand, applied to the
     negated value, since a - c is implemented as a + (-c).
     NOTE(review): negating the most negative HOST_WIDE_INT here is
     signed overflow (undefined behavior) in theory -- verify callers
     never pass it, or rely on the usual wrapping semantics.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
964
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than HOST_WIDE_INT must be at least 64 bits,
	     so a 32-bit-or-narrower mode landing here is a bug.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT stands for a sign-extended value,
	     which would set bits above the low 32.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept the constant when no bits above the low 32 survive
	 masking to MODE.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only occurs when the value does not
	 fit in a CONST_INT, i.e. DImode on a 32-bit host.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Low word arbitrary; high word must be all zeros.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
998
999/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1000
1001int
1002got_operand (op, mode)
1003     rtx op;
1004     enum machine_mode mode ATTRIBUTE_UNUSED;
1005{
1006  return (GET_CODE (op) == SYMBOL_REF
1007	  || GET_CODE (op) == CONST
1008	  || GET_CODE (op) == LABEL_REF);
1009}
1010
1011/* Return 1 if the operand is a simple references that can be loaded via
1012   the GOT (labels involving addition aren't allowed).  */
1013
1014int
1015got_no_const_operand (op, mode)
1016     rtx op;
1017     enum machine_mode mode ATTRIBUTE_UNUSED;
1018{
1019  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1020}
1021
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split the 64-bit value into two 32-bit halves.  */
      HOST_WIDE_INT low  = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;  /* sign extend */

      /* Values that sign-extend correctly from 32 bits take two
	 insns (addis + ori or similar).  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Low half zero: build the high half, then one shift.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      /* General case: build each half, plus a shift to combine.  */
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* 32-bit target (or host): anything else is addis + ori.  */
  else
    return 2;
}
1063
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  OP is a CONST_INT or a
   CONST_DOUBLE (integral or floating).  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value that doesn't sign-extend from 32 bits but matches a
	 64-bit mask pattern can be built with li/lis + rldic*.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* A single-precision float is loaded via its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integral CONST_DOUBLE: take the words directly.  Otherwise
	 convert the float to its target double image first.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      /* 32-bit: each word is built independently in its own register.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit: values that sign-extend from the low word only
	     cost what the low word costs.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  /* Low word zero: build high word, then shift left 32.  */
	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  /* General case: both halves plus a combining shift.  */
	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1138
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if (TARGET_SOFT_FLOAT && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      /* A double is easy when each 32-bit word of its target image
	 loads in a single instruction.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* A float is easy when its 32-bit image loads in one insn.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* One instruction per word means at most two insns for DImode.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1201
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register with one instruction.  Currently only the all-zero vector
   qualifies.  (The old comment said CONST_INT, which was wrong.)  */

static int
easy_vector_constant (op)
     rtx op;
{
  rtx elt;
  int units, i;

  if (GET_CODE (op) != CONST_VECTOR)
    return 0;

  units = CONST_VECTOR_NUNITS (op);

  /* We can generate 0 easily.  Look for that.  */
  for (i = 0; i < units; ++i)
    {
      elt = CONST_VECTOR_ELT (op, i);

      /* We could probably simplify this by just checking for equality
	 with CONST0_RTX for the current mode, but let's be safe
	 instead.  */

      switch (GET_CODE (elt))
	{
	case CONST_INT:
	  if (INTVAL (elt) != 0)
	    return 0;
	  break;
	case CONST_DOUBLE:
	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
	    return 0;
	  break;
	default:
	  /* Any other element kind is not a known-easy constant.  */
	  return 0;
	}
    }

  /* We could probably generate a few other constants trivially, but
     gcc doesn't generate them yet.  FIXME later.  */
  return 1;
}
1245
1246/* Return 1 if the operand is the constant 0.  This works for scalars
1247   as well as vectors.  */
1248int
1249zero_constant (op, mode)
1250     rtx op;
1251     enum machine_mode mode;
1252{
1253  return op == CONST0_RTX (mode);
1254}
1255
1256/* Return 1 if the operand is 0.0.  */
1257int
1258zero_fp_constant (op, mode)
1259     rtx op;
1260     enum machine_mode mode;
1261{
1262  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1263}
1264
1265/* Return 1 if the operand is in volatile memory.  Note that during
1266   the RTL generation phase, memory_operand does not return TRUE for
1267   volatile memory references.  So this function allows us to
1268   recognize volatile references where its safe.  */
1269
1270int
1271volatile_mem_operand (op, mode)
1272     rtx op;
1273     enum machine_mode mode;
1274{
1275  if (GET_CODE (op) != MEM)
1276    return 0;
1277
1278  if (!MEM_VOLATILE_P (op))
1279    return 0;
1280
1281  if (mode != GET_MODE (op))
1282    return 0;
1283
1284  if (reload_completed)
1285    return memory_operand (op, mode);
1286
1287  if (reload_in_progress)
1288    return strict_memory_address_p (mode, XEXP (op, 0));
1289
1290  return memory_address_p (mode, XEXP (op, 0));
1291}
1292
1293/* Return 1 if the operand is an offsettable memory operand.  */
1294
1295int
1296offsettable_mem_operand (op, mode)
1297     rtx op;
1298     enum machine_mode mode;
1299{
1300  return ((GET_CODE (op) == MEM)
1301	  && offsettable_address_p (reload_completed || reload_in_progress,
1302				    mode, XEXP (op, 0)));
1303}
1304
1305/* Return 1 if the operand is either an easy FP constant (see above) or
1306   memory.  */
1307
1308int
1309mem_or_easy_const_operand (op, mode)
1310     rtx op;
1311     enum machine_mode mode;
1312{
1313  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1314}
1315
1316/* Return 1 if the operand is either a non-special register or an item
1317   that can be used as the operand of a `mode' add insn.  */
1318
1319int
1320add_operand (op, mode)
1321    rtx op;
1322    enum machine_mode mode;
1323{
1324  if (GET_CODE (op) == CONST_INT)
1325    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1326	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1327
1328  return gpc_reg_operand (op, mode);
1329}
1330
1331/* Return 1 if OP is a constant but not a valid add_operand.  */
1332
1333int
1334non_add_cint_operand (op, mode)
1335     rtx op;
1336     enum machine_mode mode ATTRIBUTE_UNUSED;
1337{
1338  return (GET_CODE (op) == CONST_INT
1339	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1340	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1341}
1342
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000,
   i.e. one whose significant bits fit entirely in either the low or
   the high 16 bits (ori/oris, xori/xoris immediates).  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative CONST_INT for a wider mode stands
	 for a sign-extended value whose high bits are all ones.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only appear for modes wider than
	 HOST_WIDE_INT (DImode on a 32-bit host).  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* All set bits must lie within the low halfword or within the
     high halfword of the low 32 bits.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1381
1382/* Return 1 if C is a constant that is not a logical operand (as
1383   above), but could be split into one.  */
1384
1385int
1386non_logical_cint_operand (op, mode)
1387     rtx op;
1388     enum machine_mode mode;
1389{
1390  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1391	  && ! logical_operand (op, mode)
1392	  && reg_or_logical_cint_operand (op, mode));
1393}
1394
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1441
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      /* Same test spread across the two words of an integral
	 CONST_DOUBLE (64-bit value on a 32-bit host).  */
      HOST_WIDE_INT low, high, lsb;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  /* Normalize so the least significant bit is zero, as above.  */
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* All zeros (or all ones before inversion) is rejected;
	     otherwise the single transition must lie in the high word.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition in the low word: every bit above it, including the
	 whole high word, must be 1.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1501
1502/* Return 1 if the operand is either a non-special register or a constant
1503   that can be used as the operand of a PowerPC64 logical AND insn.  */
1504
1505int
1506and64_operand (op, mode)
1507    rtx op;
1508    enum machine_mode mode;
1509{
1510  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1511    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1512
1513  return (logical_operand (op, mode) || mask64_operand (op, mode));
1514}
1515
1516/* Return 1 if the operand is either a non-special register or a
1517   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1518
1519int
1520and_operand (op, mode)
1521    rtx op;
1522    enum machine_mode mode;
1523{
1524  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1525    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1526
1527  return (logical_operand (op, mode) || mask_operand (op, mode));
1528}
1529
1530/* Return 1 if the operand is a general register or memory operand.  */
1531
1532int
1533reg_or_mem_operand (op, mode)
1534     rtx op;
1535     enum machine_mode mode;
1536{
1537  return (gpc_reg_operand (op, mode)
1538	  || memory_operand (op, mode)
1539	  || volatile_mem_operand (op, mode));
1540}
1541
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload a SUBREG of a hard register is just another view of
     the same register; look through it.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa has no update form and any displacement must be a multiple
     of 4 (DS-field), so reject pre-increment/decrement addresses and
     reg+const addresses with a misaligned constant.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1564
1565/* Return 1 if the operand, used inside a MEM, is a valid first argument
1566   to CALL.  This is a SYMBOL_REF or a pseudo-register, which will be
1567   forced to lr.  */
1568
1569int
1570call_operand (op, mode)
1571     rtx op;
1572     enum machine_mode mode;
1573{
1574  if (mode != VOIDmode && GET_MODE (op) != mode)
1575    return 0;
1576
1577  return (GET_CODE (op) == SYMBOL_REF
1578	  || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1579}
1580
/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
   this file and the function is not weakly defined.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* SYMBOL_REF_FLAG is presumably set elsewhere (encode_section_info)
     on symbols known local to this translation unit -- TODO confirm.
     The current function itself also qualifies, unless it is weak: a
     weak definition may be replaced by one in another file at link
     time.  */
  return (GET_CODE (op) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (op)
	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
	          && ! DECL_WEAK (current_function_decl))));
}
1594
/* Return 1 if this operand is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Anything else (e.g. an arbitrary SYMBOL_REF) must be legitimized
     before it can be moved.  */
  return 0;
}
1651
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data only exists for -msdata=eabi / -msdata=sysv.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  /* OP is either a bare SYMBOL_REF or (const (plus SYMBOL_REF
     CONST_INT)); anything else cannot be a small-data reference.  */
  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols carry a leading '@' in their name, presumably
     added by ENCODE_SECTION_INFO -- TODO confirm.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1700
/* Recursive worker for constant_pool_expr_p and toc_relative_expr_p.
   Walk OP and return 1 if every leaf is acceptable: a constant-pool
   SYMBOL_REF whose constant goes in the TOC (sets *HAVE_SYM), the TOC
   label itself (sets *HAVE_TOC), or a CONST_INT.  PLUS/MINUS/CONST
   are traversed; any other code rejects the expression.  */

static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only count pool symbols whose constant is emitted into
	     the TOC.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves of a sum/difference must qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
1739
1740int
1741constant_pool_expr_p (op)
1742    rtx op;
1743{
1744  int have_sym = 0;
1745  int have_toc = 0;
1746  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1747}
1748
1749int
1750toc_relative_expr_p (op)
1751    rtx op;
1752{
1753    int have_sym = 0;
1754    int have_toc = 0;
1755    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1756}
1757
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split the constant so the low part
     fits a 16-bit signed displacement and the high part is added to
     the register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If bit 15 of the low part is set, the displacement will be
	 sign-extended by the hardware; compensate in the high part.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so
     an indexed (reg+reg) address results, for modes that allow
     indexed addressing.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec memory accesses take only reg or reg+reg addresses.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without TOC: address a symbolic constant via a high/lo_sum
     pair (lis + offset in the memory insn).  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Mach-O (Darwin) without TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constant-pool references that live in the TOC become TOC-relative
     addresses.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing we can improve; tell the caller to use X unchanged.  */
    return NULL_RTX;
}
1858
1859/* The convention appears to be to define this wherever it is used.
1860   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1861   is now used here.  */
1862#ifndef REG_MODE_OK_FOR_BASE_P
1863#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1864#endif
1865
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (plus (plus reg high) low): reload the inner sum into a base
	 register and keep the low displacement in the address.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + constant: split the constant into a 16-bit signed low part
     left in the address and a high part reloaded into the base reg.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Rewrite the symbol as
	 a picbase-relative lo_sum so a mov*_low insn can be used.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* TOC-resident constant-pool references become TOC-relative.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* No transformation applies; let reload handle X the hard way.  */
  *win = 0;
  return x;
}
1988
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Auto-increment/decrement, when the target supports update forms
     (AltiVec loads/stores have none).  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data reference (V.4/eabi).  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-relative constant pool address.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Register plus 16-bit signed displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Register plus register, except for multi-register modes (see the
     function comment above).  */
  if (mode != TImode
      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* (lo_sum reg sym) produced by legitimization above.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2041
2042/* Try to output insns to set TARGET equal to the constant C if it can
2043   be done in less than N insns.  Do all computations in MODE.
2044   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */
2047
rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  /* Low and high halves of the constant, as HOST_WIDE_INTs.  */
  HOST_WIDE_INT c0, c1;

  /* Narrow integer modes fit in one register: a single SET suffices.  */
  if (mode == QImode || mode == HImode || mode == SImode)
    {
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }

  /* Wider constants: split SOURCE into low (c0) and high (c1) halves
     and hand off to the DImode constant synthesizer.  */
  if (GET_CODE (source) == CONST_INT)
    {
      c0 = INTVAL (source);
      c1 = -(c0 < 0);		/* sign-extend c0 into the high half */
    }
  else if (GET_CODE (source) == CONST_DOUBLE)
    {
#if HOST_BITS_PER_WIDE_INT >= 64
      /* The whole value fits in the low word; high is its sign.  */
      c0 = CONST_DOUBLE_LOW (source);
      c1 = -(c0 < 0);
#else
      c0 = CONST_DOUBLE_LOW (source);
      c1 = CONST_DOUBLE_HIGH (source);
#endif
    }
  else
    abort ();

  return rs6000_emit_set_long_const (dest, c0, c1);
}
2084
2085/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2086   fall back to a straight forward decomposition.  We do this to avoid
2087   exponential run times encountered when looking for longer sequences
2088   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      /* 32-bit target: store the constant as two word-sized moves into
	 the subwords of DEST, chosen according to endianness.  */
      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* The 64-bit value split into four 16-bit pieces, ud1 least
	 significant.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: value is a sign-extended 16-bit number — one load
	 immediate.  ((x ^ 0x8000) - 0x8000) sign-extends 16 bits.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}
      /* Case 2: value is a sign-extended 32-bit number — load the
	 shifted upper piece, then OR in the low 16 bits if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: value is a sign-extended 48-bit number — build the top
	 32 bits, shift left 16, then OR in the low piece.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 4: full 64-bit value — build the high 32 bits, shift left
	 32, then OR in the two low pieces.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2173
2174/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose high word is merely the sign
     extension of the low word should have been a CONST_INT.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Do the DImode memory-to-memory copy as two SImode moves.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* Storing to memory: make sure the source ends up in a register.  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  /* On POWER (! TARGET_POWERPC), an SFmode store from a register may
     need an explicit double-to-single truncation first; see below.  */
  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!VECTOR_MODE_P (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Constants other than CONST_INT must come from the pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that can't be synthesized cheaply go to the pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC (flag_pic == 1): load through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and not PIC: materialize the address with a HIGH /
	 LO_SUM ("lis; la"-style) pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      /* Strip the leading dot(s) to name the descriptor.  */
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant landed in the TOC, reference it
	     TOC-relative.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must use plain register-indirect
	 addresses; copy any other address into a register.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2471
2472/* Initialize a variable CUM of type CUMULATIVE_ARGS
2473   for a call to a function whose data type is FNTYPE.
2474   For a library call, FNTYPE is 0.
2475
2476   For incoming args we set the number of arguments in the prototype large
2477   so we never return a PARALLEL.  */
2478
2479void
2480init_cumulative_args (cum, fntype, libname, incoming)
2481     CUMULATIVE_ARGS *cum;
2482     tree fntype;
2483     rtx libname ATTRIBUTE_UNUSED;
2484     int incoming;
2485{
2486  static CUMULATIVE_ARGS zero_cumulative;
2487
2488  *cum = zero_cumulative;
2489  cum->words = 0;
2490  cum->fregno = FP_ARG_MIN_REG;
2491  cum->vregno = ALTIVEC_ARG_MIN_REG;
2492  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2493  cum->call_cookie = CALL_NORMAL;
2494  cum->sysv_gregno = GP_ARG_MIN_REG;
2495
2496  if (incoming)
2497    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2498
2499  else if (cum->prototype)
2500    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2501			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2502			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2503
2504  else
2505    cum->nargs_prototype = 0;
2506
2507  cum->orig_nargs = cum->nargs_prototype;
2508
2509  /* Check for longcall's */
2510  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2511    cum->call_cookie = CALL_LONG;
2512
2513  if (TARGET_DEBUG_ARG)
2514    {
2515      fprintf (stderr, "\ninit_cumulative_args:");
2516      if (fntype)
2517	{
2518	  tree ret_type = TREE_TYPE (fntype);
2519	  fprintf (stderr, " ret code = %s,",
2520		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2521	}
2522
2523      if (cum->call_cookie & CALL_LONG)
2524	fprintf (stderr, " longcall,");
2525
2526      fprintf (stderr, " proto = %d, nargs = %d\n",
2527	       cum->prototype, cum->nargs_prototype);
2528    }
2529}
2530
2531/* If defined, a C expression which determines whether, and in which
2532   direction, to pad out an argument with extra space.  The value
2533   should be of type `enum direction': either `upward' to pad above
2534   the argument, `downward' to pad below, or `none' to inhibit
2535   padding.
2536
2537   For the AIX ABI structs are always stored left shifted in their
2538   argument slot.  */
2539
2540enum direction
2541function_arg_padding (mode, type)
2542     enum machine_mode mode;
2543     tree type;
2544{
2545  if (type != 0 && AGGREGATE_TYPE_P (type))
2546    return upward;
2547
2548  /* This is the default definition.  */
2549  return (! BYTES_BIG_ENDIAN
2550          ? upward
2551          : ((mode == BLKmode
2552              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2553                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2554              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2555             ? downward : upward));
2556}
2557
2558/* If defined, a C expression that gives the alignment boundary, in bits,
2559   of an argument with the specified mode and type.  If it is not defined,
2560   PARM_BOUNDARY is used for all arguments.
2561
2562   V.4 wants long longs to be double word aligned.  */
2563
2564int
2565function_arg_boundary (mode, type)
2566     enum machine_mode mode;
2567     tree type ATTRIBUTE_UNUSED;
2568{
2569  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2570    return 64;
2571  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2572    return 128;
2573  else
2574    return PARM_BOUNDARY;
2575}
2576
2577/* Update the data in CUM to advance over an argument
2578   of mode MODE and data type TYPE.
2579   (TYPE is null for libcalls where that information may not be available.)  */
2580
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* A vector arg consumes a vector register while any remain;
	 otherwise it consumes stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FP registers exhausted: the value goes on the stack,
		 doubleword-aligned if it is a DFmode value.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: gregno keeps advancing even after arguments start
	     spilling to the stack, so that expand_builtin_saveregs can
	     tell that spilling has begun.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* Other ABIs: every arg consumes GPR/stack words; honor
	 doubleword alignment on 32-bit targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args also advance the FP register cursor.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2672
2673/* Determine where to put an argument to a function.
2674   Value is zero to push the argument on the stack,
2675   or a hard register in which to store the argument.
2676
2677   MODE is the argument's machine mode.
2678   TYPE is the data type of the argument (as a tree).
2679    This is null for libcalls where that information may
2680    not be available.
2681   CUM is a variable of type CUMULATIVE_ARGS which gives info about
2682    the preceding args and about the function being called.
2683   NAMED is nonzero if this argument is a named parameter
2684    (otherwise it is an extra parameter matching an ellipsis).
2685
2686   On RS/6000 the first eight words of non-FP are normally in registers
2687   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
2688   Under V.4, the first 8 FP args are in registers.
2689
2690   If this is floating-point and no prototype is specified, we use
2691   both an FP and integer register (or possibly FP reg and stack).  Library
2692   functions (when TYPE is zero) always have the proper types for args,
2693   so we can pass the FP value just in one register.  emit_library_function
2694   doesn't support PARALLEL anyway.  */
2695
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named vector args go in vector registers while any remain;
	 anything else goes in memory.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX-style passing: compute the GPR slot index, honoring
	 doubleword alignment on 32-bit targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types go on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* No (usable) prototype: pass the value both in an FP register
	     and in the shadowing GPR slot (or memory), since the callee's
	     expectations are unknown (see the comment above the
	     function).  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2812
2813/* For an arg passed partly in registers and partly in memory,
2814   this is the number of registers used.
2815   For args passed entirely in registers or entirely in memory, zero.  */
2816
2817int
2818function_arg_partial_nregs (cum, mode, type, named)
2819     CUMULATIVE_ARGS *cum;
2820     enum machine_mode mode;
2821     tree type;
2822     int named ATTRIBUTE_UNUSED;
2823{
2824  if (DEFAULT_ABI == ABI_V4)
2825    return 0;
2826
2827  if (USE_FP_FOR_ARG_P (*cum, mode, type)
2828      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2829    {
2830      if (cum->nargs_prototype >= 0)
2831	return 0;
2832    }
2833
2834  if (cum->words < GP_ARG_NUM_REG
2835      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2836    {
2837      int ret = GP_ARG_NUM_REG - cum->words;
2838      if (ret && TARGET_DEBUG_ARG)
2839	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2840
2841      return ret;
2842    }
2843
2844  return 0;
2845}
2846
2847/* A C expression that indicates when an argument must be passed by
2848   reference.  If nonzero for an argument, a copy of that argument is
2849   made in memory and a pointer to the argument is passed instead of
2850   the argument itself.  The pointer is passed in whatever way is
2851   appropriate for passing a pointer to that type.
2852
2853   Under V.4, structures and unions are passed by reference.  */
2854
2855int
2856function_arg_pass_by_reference (cum, mode, type, named)
2857     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2858     enum machine_mode mode ATTRIBUTE_UNUSED;
2859     tree type;
2860     int named ATTRIBUTE_UNUSED;
2861{
2862  if (DEFAULT_ABI == ABI_V4
2863      && ((type && AGGREGATE_TYPE_P (type))
2864	  || mode == TFmode))
2865    {
2866      if (TARGET_DEBUG_ARG)
2867	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2868
2869      return 1;
2870    }
2871
2872  return 0;
2873}
2874
2875/* Perform any needed actions needed for a function that is receiving a
2876   variable number of arguments.
2877
2878   CUM is as above.
2879
2880   MODE and TYPE are the mode and type of the current parameter.
2881
2882   PRETEND_SIZE is a variable that should be set to the amount of stack
2883   that must be pushed by the prolog to pretend that our caller pushed
2884   it.
2885
2886   Normally, this macro will push all remaining incoming registers on the
2887   stack and set PRETEND_SIZE to the length of the registers pushed.  */
2888
2889void
2890setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2891     CUMULATIVE_ARGS *cum;
2892     enum machine_mode mode;
2893     tree type;
2894     int *pretend_size;
2895     int no_rtl;
2896
2897{
2898  CUMULATIVE_ARGS next_cum;
2899  int reg_size = TARGET_32BIT ? 4 : 8;
2900  rtx save_area = NULL_RTX, mem;
2901  int first_reg_offset, set;
2902  tree fntype;
2903  int stdarg_p;
2904
2905  fntype = TREE_TYPE (current_function_decl);
2906  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2907	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2908		  != void_type_node));
2909
2910  /* For varargs, we do not want to skip the dummy va_dcl argument.
2911     For stdargs, we do want to skip the last named argument.  */
2912  next_cum = *cum;
2913  if (stdarg_p)
2914    function_arg_advance (&next_cum, mode, type, 1);
2915
2916  if (DEFAULT_ABI == ABI_V4)
2917    {
2918      /* Indicate to allocate space on the stack for varargs save area.  */
2919      /* ??? Does this really have to be located at a magic spot on the
2920	 stack, or can we allocate this with assign_stack_local instead.  */
2921      cfun->machine->sysv_varargs_p = 1;
2922      if (! no_rtl)
2923	save_area = plus_constant (virtual_stack_vars_rtx,
2924				   - RS6000_VARARGS_SIZE);
2925
2926      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2927    }
2928  else
2929    {
2930      first_reg_offset = next_cum.words;
2931      save_area = virtual_incoming_args_rtx;
2932      cfun->machine->sysv_varargs_p = 0;
2933
2934      if (MUST_PASS_IN_STACK (mode, type))
2935	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2936    }
2937
2938  set = get_varargs_alias_set ();
2939  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2940    {
2941      mem = gen_rtx_MEM (BLKmode,
2942		         plus_constant (save_area,
2943					first_reg_offset * reg_size)),
2944      set_mem_alias_set (mem, set);
2945      set_mem_align (mem, BITS_PER_WORD);
2946
2947      move_block_from_reg
2948	(GP_ARG_MIN_REG + first_reg_offset, mem,
2949	 GP_ARG_NUM_REG - first_reg_offset,
2950	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2951
2952      /* ??? Does ABI_V4 need this at all?  */
2953      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2954    }
2955
2956  /* Save FP registers if needed.  */
2957  if (DEFAULT_ABI == ABI_V4
2958      && TARGET_HARD_FLOAT && ! no_rtl
2959      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2960    {
2961      int fregno = next_cum.fregno;
2962      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2963      rtx lab = gen_label_rtx ();
2964      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2965
2966      emit_jump_insn (gen_rtx_SET (VOIDmode,
2967				   pc_rtx,
2968				   gen_rtx_IF_THEN_ELSE (VOIDmode,
2969					    gen_rtx_NE (VOIDmode, cr1,
2970						        const0_rtx),
2971					    gen_rtx_LABEL_REF (VOIDmode, lab),
2972					    pc_rtx)));
2973
2974      while (fregno <= FP_ARG_V4_MAX_REG)
2975	{
2976	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2977          set_mem_alias_set (mem, set);
2978	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2979	  fregno++;
2980	  off += 8;
2981	}
2982
2983      emit_label (lab);
2984    }
2985}
2986
2987/* Create the va_list data type.  */
2988
tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* For V4, va_list is a record with four fields: counts of the gp
     and fp argument registers consumed so far, a pointer into the
     caller's overflow argument area, and a pointer to the register
     save area laid down by the prologue.  */
  record = make_lang_type (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order.  rs6000_va_start and
     rs6000_va_arg walk this chain with TREE_CHAIN and depend on
     exactly this order: gpr, fpr, overflow_arg_area, reg_save_area.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3028
3029/* Implement va_start.  */
3030
void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* Pick apart the va_list record; the field order here must match
     rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each field of the va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the gp register counter field.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Initialize the fp register counter field.  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area, located a fixed distance below the
     frame pointer (see setup_incoming_varargs).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3100
3101/* Implement va_arg.  */
3102
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Pick apart the va_list record; field order must match
     rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register file it lives in (REG),
     how many registers it needs (N_REG), and where/how its slot is
     found in the save area (SAV_OFS, SAV_SCALE).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* Vectors never go in registers.  */
  if (TREE_CODE (type) != VECTOR_TYPE)
    {
      /* If the register counter would exceed 8, the argument is in
	 the overflow area instead; branch to lab_false.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the register counter up to a multiple of n_reg.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Advance the register counter past this argument while
	 computing its address: sav + sav_ofs + reg * sav_scale.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* Vectors are 16 byte aligned.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Bump the overflow pointer past the argument just fetched.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, what we computed is the address of a
     pointer; load through it to get the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3270
3271/* Builtins.  */
3272
/* Register builtin NAME, of function type TYPE and builtin code CODE,
   but only when the target flag bits in MASK (e.g. MASK_ALTIVEC) are
   enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
do {									\
  if ((MASK) & target_flags)						\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);	\
} while (0)
3278
/* Describes one builtin: the target-flag mask that enables it, the
   insn pattern it expands to, its user-visible name, and its internal
   builtin code.  */
struct builtin_description
{
  const unsigned int mask;	/* target_flags bits required */
  const enum insn_code icode;	/* insn pattern to expand to */
  const char *const name;	/* "__builtin_altivec_..." name */
  const enum rs6000_builtins code;	/* internal builtin code */
};
3286
3287/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3288
/* Entry format: enabling mask, insn pattern, builtin name, builtin code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3315
3316/* DST operations: void foo (void *, const int, const char).  */
3317
/* Entry format: enabling mask, insn pattern, builtin name, builtin code.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3325
3326/* Simple binary operations: VECc = foo (VECa, VECb).  */
3327
3328static const struct builtin_description bdesc_2arg[] =
3329{
3330  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3331  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3332  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3333  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3334  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3335  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3336  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3337  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3338  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3339  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3340  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3341  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3342  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3343  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3344  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3345  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3346  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3347  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3348  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3349  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3350  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3351  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3352  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3353  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3354  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3355  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3356  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3357  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3358  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3359  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3360  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3361  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3362  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3363  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3364  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3365  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3366  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3367  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3368  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3369  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3370  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3371  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3372  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3373  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3374  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3375  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3376  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3377  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3378  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3379  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3380  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3381  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3382  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3383  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3384  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3385  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3386  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3387  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3388  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3389  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3390  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3391  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3392  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3393  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3394  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3395  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3396  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3397  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3398  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3399  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3400  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3401  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3402  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3403  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3404  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3405  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3406  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3407  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3408  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3409  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3410  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3411  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3412  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3413  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3414  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3415  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3416  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3417  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3418  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3419  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3420  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3421  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3422  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3423  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3424  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3425  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3426  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3427  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3428  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3429  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3430  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3431  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3432  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3433  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3434  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3435  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3436  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3437  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3438  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3439  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3440  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3441  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3442  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3443};
3444
3445/* AltiVec predicates.  */
3446
/* Like builtin_description, but with an extra opcode string for the
   AltiVec predicate builtins (presumably the dot-form compare insn
   the predicate pattern emits -- see the expander for how it is used).  */
struct builtin_description_predicates
{
  const unsigned int mask;	/* target_flags bits required */
  const enum insn_code icode;	/* predicate insn pattern */
  const char *opcode;		/* compare opcode string, e.g. "*vcmpbfp." */
  const char *const name;	/* "__builtin_altivec_..._p" name */
  const enum rs6000_builtins code;	/* internal builtin code */
};
3455
/* Entry format: enabling mask, predicate pattern, compare opcode,
   builtin name, builtin code.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
3472
/* ABS* operations.  */
3474
/* Entry format: enabling mask, insn pattern, builtin name, builtin code.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
3485
3486/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3487   foo (VECa).  */
3488
/* Entry format: enabling mask, insn pattern, builtin name, builtin code.  */
static const struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3509
3510static rtx
3511altivec_expand_unop_builtin (icode, arglist, target)
3512     enum insn_code icode;
3513     tree arglist;
3514     rtx target;
3515{
3516  rtx pat;
3517  tree arg0 = TREE_VALUE (arglist);
3518  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3519  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3520  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3521
3522  /* If we got invalid arguments bail out before generating bad rtl.  */
3523  if (arg0 == error_mark_node)
3524    return NULL_RTX;
3525
3526  if (target == 0
3527      || GET_MODE (target) != tmode
3528      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3529    target = gen_reg_rtx (tmode);
3530
3531  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3532    op0 = copy_to_mode_reg (mode0, op0);
3533
3534  pat = GEN_FCN (icode) (target, op0);
3535  if (! pat)
3536    return 0;
3537  emit_insn (pat);
3538
3539  return target;
3540}
3541
3542static rtx
3543altivec_expand_abs_builtin (icode, arglist, target)
3544     enum insn_code icode;
3545     tree arglist;
3546     rtx target;
3547{
3548  rtx pat, scratch1, scratch2;
3549  tree arg0 = TREE_VALUE (arglist);
3550  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3551  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3552  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3553
3554  /* If we have invalid arguments, bail out before generating bad rtl.  */
3555  if (arg0 == error_mark_node)
3556    return NULL_RTX;
3557
3558  if (target == 0
3559      || GET_MODE (target) != tmode
3560      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3561    target = gen_reg_rtx (tmode);
3562
3563  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3564    op0 = copy_to_mode_reg (mode0, op0);
3565
3566  scratch1 = gen_reg_rtx (mode0);
3567  scratch2 = gen_reg_rtx (mode0);
3568
3569  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3570  if (! pat)
3571    return 0;
3572  emit_insn (pat);
3573
3574  return target;
3575}
3576
3577static rtx
3578altivec_expand_binop_builtin (icode, arglist, target)
3579     enum insn_code icode;
3580     tree arglist;
3581     rtx target;
3582{
3583  rtx pat;
3584  tree arg0 = TREE_VALUE (arglist);
3585  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3586  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3587  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3588  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3589  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3590  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3591
3592  /* If we got invalid arguments bail out before generating bad rtl.  */
3593  if (arg0 == error_mark_node || arg1 == error_mark_node)
3594    return NULL_RTX;
3595
3596  if (target == 0
3597      || GET_MODE (target) != tmode
3598      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3599    target = gen_reg_rtx (tmode);
3600
3601  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3602    op0 = copy_to_mode_reg (mode0, op0);
3603  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3604    op1 = copy_to_mode_reg (mode1, op1);
3605
3606  pat = GEN_FCN (icode) (target, op0, op1);
3607  if (! pat)
3608    return 0;
3609  emit_insn (pat);
3610
3611  return target;
3612}
3613
3614static rtx
3615altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3616     enum insn_code icode;
3617     const char *opcode;
3618     tree arglist;
3619     rtx target;
3620{
3621  rtx pat, scratch;
3622  tree cr6_form = TREE_VALUE (arglist);
3623  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3624  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3625  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3626  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3627  enum machine_mode tmode = SImode;
3628  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3629  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3630  int cr6_form_int;
3631
3632  if (TREE_CODE (cr6_form) != INTEGER_CST)
3633    {
3634      error ("argument 1 of __builtin_altivec_predicate must be a constant");
3635      return NULL_RTX;
3636    }
3637  else
3638    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3639
3640  if (mode0 != mode1)
3641    abort ();
3642
3643  /* If we have invalid arguments, bail out before generating bad rtl.  */
3644  if (arg0 == error_mark_node || arg1 == error_mark_node)
3645    return NULL_RTX;
3646
3647  if (target == 0
3648      || GET_MODE (target) != tmode
3649      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3650    target = gen_reg_rtx (tmode);
3651
3652  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3653    op0 = copy_to_mode_reg (mode0, op0);
3654  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3655    op1 = copy_to_mode_reg (mode1, op1);
3656
3657  scratch = gen_reg_rtx (mode0);
3658
3659  pat = GEN_FCN (icode) (scratch, op0, op1,
3660			 gen_rtx (SYMBOL_REF, Pmode, opcode));
3661  if (! pat)
3662    return 0;
3663  emit_insn (pat);
3664
3665  /* The vec_any* and vec_all* predicates use the same opcodes for two
3666     different operations, but the bits in CR6 will be different
3667     depending on what information we want.  So we have to play tricks
3668     with CR6 to get the right bits out.
3669
3670     If you think this is disgusting, look at the specs for the
3671     AltiVec predicates.  */
3672
3673     switch (cr6_form_int)
3674       {
3675       case 0:
3676	 emit_insn (gen_cr6_test_for_zero (target));
3677	 break;
3678       case 1:
3679	 emit_insn (gen_cr6_test_for_zero_reverse (target));
3680	 break;
3681       case 2:
3682	 emit_insn (gen_cr6_test_for_lt (target));
3683	 break;
3684       case 3:
3685	 emit_insn (gen_cr6_test_for_lt_reverse (target));
3686	 break;
3687       default:
3688	 error ("argument 1 of __builtin_altivec_predicate is out of range");
3689	 break;
3690       }
3691
3692  return target;
3693}
3694
3695static rtx
3696altivec_expand_stv_builtin (icode, arglist)
3697     enum insn_code icode;
3698     tree arglist;
3699{
3700  tree arg0 = TREE_VALUE (arglist);
3701  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3702  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3703  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3704  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3705  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3706  rtx pat;
3707  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3708  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3709  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3710
3711  /* Invalid arguments.  Bail before doing anything stoopid!  */
3712  if (arg0 == error_mark_node
3713      || arg1 == error_mark_node
3714      || arg2 == error_mark_node)
3715    return NULL_RTX;
3716
3717  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3718    op0 = copy_to_mode_reg (mode2, op0);
3719  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3720    op1 = copy_to_mode_reg (mode0, op1);
3721  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3722    op2 = copy_to_mode_reg (mode1, op2);
3723
3724  pat = GEN_FCN (icode) (op1, op2, op0);
3725  if (pat)
3726    emit_insn (pat);
3727  return NULL_RTX;
3728}
3729
3730static rtx
3731altivec_expand_ternop_builtin (icode, arglist, target)
3732     enum insn_code icode;
3733     tree arglist;
3734     rtx target;
3735{
3736  rtx pat;
3737  tree arg0 = TREE_VALUE (arglist);
3738  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3739  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3740  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3741  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3742  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3743  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3744  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3745  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3746  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3747
3748  /* If we got invalid arguments bail out before generating bad rtl.  */
3749  if (arg0 == error_mark_node
3750      || arg1 == error_mark_node
3751      || arg2 == error_mark_node)
3752    return NULL_RTX;
3753
3754  if (target == 0
3755      || GET_MODE (target) != tmode
3756      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3757    target = gen_reg_rtx (tmode);
3758
3759  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3760    op0 = copy_to_mode_reg (mode0, op0);
3761  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3762    op1 = copy_to_mode_reg (mode1, op1);
3763  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3764    op2 = copy_to_mode_reg (mode2, op2);
3765
3766  pat = GEN_FCN (icode) (target, op0, op1, op2);
3767  if (! pat)
3768    return 0;
3769  emit_insn (pat);
3770
3771  return target;
3772}
3773static rtx
3774altivec_expand_builtin (exp, target)
3775     tree exp;
3776     rtx target;
3777{
3778  struct builtin_description *d;
3779  struct builtin_description_predicates *dp;
3780  size_t i;
3781  enum insn_code icode;
3782  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3783  tree arglist = TREE_OPERAND (exp, 1);
3784  tree arg0, arg1, arg2;
3785  rtx op0, op1, op2, pat;
3786  enum machine_mode tmode, mode0, mode1, mode2;
3787  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3788
3789  switch (fcode)
3790    {
3791    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3792      icode = CODE_FOR_altivec_lvx_16qi;
3793      arg0 = TREE_VALUE (arglist);
3794      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3795      tmode = insn_data[icode].operand[0].mode;
3796      mode0 = insn_data[icode].operand[1].mode;
3797
3798      if (target == 0
3799	  || GET_MODE (target) != tmode
3800	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3801	target = gen_reg_rtx (tmode);
3802
3803      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3804	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3805
3806      pat = GEN_FCN (icode) (target, op0);
3807      if (! pat)
3808	return 0;
3809      emit_insn (pat);
3810      return target;
3811
3812    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3813      icode = CODE_FOR_altivec_lvx_8hi;
3814      arg0 = TREE_VALUE (arglist);
3815      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3816      tmode = insn_data[icode].operand[0].mode;
3817      mode0 = insn_data[icode].operand[1].mode;
3818
3819      if (target == 0
3820	  || GET_MODE (target) != tmode
3821	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3822	target = gen_reg_rtx (tmode);
3823
3824      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3825	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3826
3827      pat = GEN_FCN (icode) (target, op0);
3828      if (! pat)
3829	return 0;
3830      emit_insn (pat);
3831      return target;
3832
3833    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3834      icode = CODE_FOR_altivec_lvx_4si;
3835      arg0 = TREE_VALUE (arglist);
3836      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3837      tmode = insn_data[icode].operand[0].mode;
3838      mode0 = insn_data[icode].operand[1].mode;
3839
3840      if (target == 0
3841	  || GET_MODE (target) != tmode
3842	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3843	target = gen_reg_rtx (tmode);
3844
3845      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3846	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3847
3848      pat = GEN_FCN (icode) (target, op0);
3849      if (! pat)
3850	return 0;
3851      emit_insn (pat);
3852      return target;
3853
3854    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3855      icode = CODE_FOR_altivec_lvx_4sf;
3856      arg0 = TREE_VALUE (arglist);
3857      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3858      tmode = insn_data[icode].operand[0].mode;
3859      mode0 = insn_data[icode].operand[1].mode;
3860
3861      if (target == 0
3862	  || GET_MODE (target) != tmode
3863	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3864	target = gen_reg_rtx (tmode);
3865
3866      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3867	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3868
3869      pat = GEN_FCN (icode) (target, op0);
3870      if (! pat)
3871	return 0;
3872      emit_insn (pat);
3873      return target;
3874
3875    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3876      icode = CODE_FOR_altivec_stvx_16qi;
3877      arg0 = TREE_VALUE (arglist);
3878      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3879      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3880      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3881      mode0 = insn_data[icode].operand[0].mode;
3882      mode1 = insn_data[icode].operand[1].mode;
3883
3884      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3885	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3886      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3887	op1 = copy_to_mode_reg (mode1, op1);
3888
3889      pat = GEN_FCN (icode) (op0, op1);
3890      if (pat)
3891	emit_insn (pat);
3892      return NULL_RTX;
3893
3894    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3895      icode = CODE_FOR_altivec_stvx_8hi;
3896      arg0 = TREE_VALUE (arglist);
3897      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3898      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3899      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3900      mode0 = insn_data[icode].operand[0].mode;
3901      mode1 = insn_data[icode].operand[1].mode;
3902
3903      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3904	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3905      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3906	op1 = copy_to_mode_reg (mode1, op1);
3907
3908      pat = GEN_FCN (icode) (op0, op1);
3909      if (pat)
3910	emit_insn (pat);
3911      return NULL_RTX;
3912
3913    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3914      icode = CODE_FOR_altivec_stvx_4si;
3915      arg0 = TREE_VALUE (arglist);
3916      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3917      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3918      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3919      mode0 = insn_data[icode].operand[0].mode;
3920      mode1 = insn_data[icode].operand[1].mode;
3921
3922      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3923	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3924      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3925	op1 = copy_to_mode_reg (mode1, op1);
3926
3927      pat = GEN_FCN (icode) (op0, op1);
3928      if (pat)
3929	emit_insn (pat);
3930      return NULL_RTX;
3931
3932    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3933      icode = CODE_FOR_altivec_stvx_4sf;
3934      arg0 = TREE_VALUE (arglist);
3935      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3936      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3937      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3938      mode0 = insn_data[icode].operand[0].mode;
3939      mode1 = insn_data[icode].operand[1].mode;
3940
3941      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3942	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3943      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3944	op1 = copy_to_mode_reg (mode1, op1);
3945
3946      pat = GEN_FCN (icode) (op0, op1);
3947      if (pat)
3948	emit_insn (pat);
3949      return NULL_RTX;
3950
3951    case ALTIVEC_BUILTIN_STVX:
3952      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3953    case ALTIVEC_BUILTIN_STVEBX:
3954      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3955    case ALTIVEC_BUILTIN_STVEHX:
3956      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3957    case ALTIVEC_BUILTIN_STVEWX:
3958      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3959    case ALTIVEC_BUILTIN_STVXL:
3960      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3961
3962    case ALTIVEC_BUILTIN_MFVSCR:
3963      icode = CODE_FOR_altivec_mfvscr;
3964      tmode = insn_data[icode].operand[0].mode;
3965
3966      if (target == 0
3967	  || GET_MODE (target) != tmode
3968	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3969	target = gen_reg_rtx (tmode);
3970
3971      pat = GEN_FCN (icode) (target);
3972      if (! pat)
3973	return 0;
3974      emit_insn (pat);
3975      return target;
3976
3977    case ALTIVEC_BUILTIN_MTVSCR:
3978      icode = CODE_FOR_altivec_mtvscr;
3979      arg0 = TREE_VALUE (arglist);
3980      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3981      mode0 = insn_data[icode].operand[0].mode;
3982
3983      /* If we got invalid arguments bail out before generating bad rtl.  */
3984      if (arg0 == error_mark_node)
3985	return NULL_RTX;
3986
3987      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3988	op0 = copy_to_mode_reg (mode0, op0);
3989
3990      pat = GEN_FCN (icode) (op0);
3991      if (pat)
3992	emit_insn (pat);
3993      return NULL_RTX;
3994
3995    case ALTIVEC_BUILTIN_DSSALL:
3996      emit_insn (gen_altivec_dssall ());
3997      return NULL_RTX;
3998
3999    case ALTIVEC_BUILTIN_DSS:
4000      icode = CODE_FOR_altivec_dss;
4001      arg0 = TREE_VALUE (arglist);
4002      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4003      mode0 = insn_data[icode].operand[0].mode;
4004
4005      /* If we got invalid arguments bail out before generating bad rtl.  */
4006      if (arg0 == error_mark_node)
4007	return NULL_RTX;
4008
4009      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4010	op0 = copy_to_mode_reg (mode0, op0);
4011
4012      emit_insn (gen_altivec_dss (op0));
4013      return NULL_RTX;
4014    }
4015
4016  /* Handle DST variants.  */
4017  d = (struct builtin_description *) bdesc_dst;
4018  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4019    if (d->code == fcode)
4020      {
4021	arg0 = TREE_VALUE (arglist);
4022	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4023	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4024	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4025	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4026	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4027	mode0 = insn_data[d->icode].operand[0].mode;
4028	mode1 = insn_data[d->icode].operand[1].mode;
4029	mode2 = insn_data[d->icode].operand[2].mode;
4030
4031	/* Invalid arguments, bail out before generating bad rtl.  */
4032	if (arg0 == error_mark_node
4033	    || arg1 == error_mark_node
4034	    || arg2 == error_mark_node)
4035	  return NULL_RTX;
4036
4037	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4038	  op0 = copy_to_mode_reg (mode0, op0);
4039	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4040	  op1 = copy_to_mode_reg (mode1, op1);
4041
4042	if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4043	  {
4044	    error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4045	    return NULL_RTX;
4046	  }
4047
4048	pat = GEN_FCN (d->icode) (op0, op1, op2);
4049	if (pat != 0)
4050	  emit_insn (pat);
4051
4052	return NULL_RTX;
4053      }
4054
4055  /* Expand abs* operations.  */
4056  d = (struct builtin_description *) bdesc_abs;
4057  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4058    if (d->code == fcode)
4059      return altivec_expand_abs_builtin (d->icode, arglist, target);
4060
4061  /* Handle simple unary operations.  */
4062  d = (struct builtin_description *) bdesc_1arg;
4063  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4064    if (d->code == fcode)
4065      return altivec_expand_unop_builtin (d->icode, arglist, target);
4066
4067  /* Handle simple binary operations.  */
4068  d = (struct builtin_description *) bdesc_2arg;
4069  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4070    if (d->code == fcode)
4071      return altivec_expand_binop_builtin (d->icode, arglist, target);
4072
4073  /* Expand the AltiVec predicates.  */
4074  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4075  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4076    if (dp->code == fcode)
4077      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4078
4079  /* LV* are funky.  We initialized them differently.  */
4080  switch (fcode)
4081    {
4082    case ALTIVEC_BUILTIN_LVSL:
4083      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4084					   arglist, target);
4085    case ALTIVEC_BUILTIN_LVSR:
4086      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4087					   arglist, target);
4088    case ALTIVEC_BUILTIN_LVEBX:
4089      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4090					   arglist, target);
4091    case ALTIVEC_BUILTIN_LVEHX:
4092      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4093					   arglist, target);
4094    case ALTIVEC_BUILTIN_LVEWX:
4095      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4096					   arglist, target);
4097    case ALTIVEC_BUILTIN_LVXL:
4098      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4099					   arglist, target);
4100    case ALTIVEC_BUILTIN_LVX:
4101      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4102					   arglist, target);
4103    default:
4104      break;
4105      /* Fall through.  */
4106    }
4107
4108  /* Handle simple ternary operations.  */
4109  d = (struct builtin_description *) bdesc_3arg;
4110  for (i = 0; i < sizeof  (bdesc_3arg) / sizeof *d; i++, d++)
4111    if (d->code == fcode)
4112      return altivec_expand_ternop_builtin (d->icode, arglist, target);
4113
4114  abort ();
4115  return NULL_RTX;
4116}
4117
4118/* Expand an expression EXP that calls a built-in function,
4119   with result going to TARGET if that's convenient
4120   (and in mode MODE if that's convenient).
4121   SUBTARGET may be used as the target for computing one of EXP's operands.
4122   IGNORE is nonzero if the value is to be ignored.  */
4123
4124static rtx
4125rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4126     tree exp;
4127     rtx target;
4128     rtx subtarget ATTRIBUTE_UNUSED;
4129     enum machine_mode mode ATTRIBUTE_UNUSED;
4130     int ignore ATTRIBUTE_UNUSED;
4131{
4132  if (TARGET_ALTIVEC)
4133    return altivec_expand_builtin (exp, target);
4134
4135  abort ();
4136}
4137
4138static void
4139rs6000_init_builtins ()
4140{
4141  if (TARGET_ALTIVEC)
4142    altivec_init_builtins ();
4143}
4144
4145static void
4146altivec_init_builtins (void)
4147{
4148  struct builtin_description *d;
4149  struct builtin_description_predicates *dp;
4150  size_t i;
4151
4152  tree endlink = void_list_node;
4153
4154  tree pint_type_node = build_pointer_type (integer_type_node);
4155  tree pvoid_type_node = build_pointer_type (void_type_node);
4156  tree pshort_type_node = build_pointer_type (short_integer_type_node);
4157  tree pchar_type_node = build_pointer_type (char_type_node);
4158  tree pfloat_type_node = build_pointer_type (float_type_node);
4159
4160  tree v4sf_ftype_v4sf_v4sf_v16qi
4161    = build_function_type (V4SF_type_node,
4162			   tree_cons (NULL_TREE, V4SF_type_node,
4163				      tree_cons (NULL_TREE, V4SF_type_node,
4164						 tree_cons (NULL_TREE,
4165							    V16QI_type_node,
4166							    endlink))));
4167  tree v4si_ftype_v4si_v4si_v16qi
4168    = build_function_type (V4SI_type_node,
4169			   tree_cons (NULL_TREE, V4SI_type_node,
4170				      tree_cons (NULL_TREE, V4SI_type_node,
4171						 tree_cons (NULL_TREE,
4172							    V16QI_type_node,
4173							    endlink))));
4174  tree v8hi_ftype_v8hi_v8hi_v16qi
4175    = build_function_type (V8HI_type_node,
4176			   tree_cons (NULL_TREE, V8HI_type_node,
4177				      tree_cons (NULL_TREE, V8HI_type_node,
4178						 tree_cons (NULL_TREE,
4179							    V16QI_type_node,
4180							    endlink))));
4181  tree v16qi_ftype_v16qi_v16qi_v16qi
4182    = build_function_type (V16QI_type_node,
4183			   tree_cons (NULL_TREE, V16QI_type_node,
4184				      tree_cons (NULL_TREE, V16QI_type_node,
4185						 tree_cons (NULL_TREE,
4186							    V16QI_type_node,
4187							    endlink))));
4188
4189  /* V4SI foo (char).  */
4190  tree v4si_ftype_char
4191    = build_function_type (V4SI_type_node,
4192		           tree_cons (NULL_TREE, char_type_node, endlink));
4193
4194  /* V8HI foo (char).  */
4195  tree v8hi_ftype_char
4196    = build_function_type (V8HI_type_node,
4197		           tree_cons (NULL_TREE, char_type_node, endlink));
4198
4199  /* V16QI foo (char).  */
4200  tree v16qi_ftype_char
4201    = build_function_type (V16QI_type_node,
4202		           tree_cons (NULL_TREE, char_type_node, endlink));
4203  /* V4SF foo (V4SF).  */
4204  tree v4sf_ftype_v4sf
4205    = build_function_type (V4SF_type_node,
4206			   tree_cons (NULL_TREE, V4SF_type_node, endlink));
4207
4208  /* V4SI foo (int *).  */
4209  tree v4si_ftype_pint
4210    = build_function_type (V4SI_type_node,
4211			   tree_cons (NULL_TREE, pint_type_node, endlink));
4212  /* V8HI foo (short *).  */
4213  tree v8hi_ftype_pshort
4214    = build_function_type (V8HI_type_node,
4215			   tree_cons (NULL_TREE, pshort_type_node, endlink));
4216  /* V16QI foo (char *).  */
4217  tree v16qi_ftype_pchar
4218    = build_function_type (V16QI_type_node,
4219			   tree_cons (NULL_TREE, pchar_type_node, endlink));
4220  /* V4SF foo (float *).  */
4221  tree v4sf_ftype_pfloat
4222    = build_function_type (V4SF_type_node,
4223			   tree_cons (NULL_TREE, pfloat_type_node, endlink));
4224
4225  /* V8HI foo (V16QI).  */
4226  tree v8hi_ftype_v16qi
4227    = build_function_type (V8HI_type_node,
4228			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4229
4230  /* void foo (void *, int, char/literal).  */
4231  tree void_ftype_pvoid_int_char
4232    = build_function_type (void_type_node,
4233			   tree_cons (NULL_TREE, pvoid_type_node,
4234				      tree_cons (NULL_TREE, integer_type_node,
4235						 tree_cons (NULL_TREE,
4236							    char_type_node,
4237							    endlink))));
4238
4239  /* void foo (int *, V4SI).  */
4240  tree void_ftype_pint_v4si
4241    = build_function_type (void_type_node,
4242			   tree_cons (NULL_TREE, pint_type_node,
4243				      tree_cons (NULL_TREE, V4SI_type_node,
4244						 endlink)));
4245  /* void foo (short *, V8HI).  */
4246  tree void_ftype_pshort_v8hi
4247    = build_function_type (void_type_node,
4248			   tree_cons (NULL_TREE, pshort_type_node,
4249				      tree_cons (NULL_TREE, V8HI_type_node,
4250						 endlink)));
4251  /* void foo (char *, V16QI).  */
4252  tree void_ftype_pchar_v16qi
4253    = build_function_type (void_type_node,
4254			   tree_cons (NULL_TREE, pchar_type_node,
4255				      tree_cons (NULL_TREE, V16QI_type_node,
4256						 endlink)));
4257  /* void foo (float *, V4SF).  */
4258  tree void_ftype_pfloat_v4sf
4259    = build_function_type (void_type_node,
4260			   tree_cons (NULL_TREE, pfloat_type_node,
4261				      tree_cons (NULL_TREE, V4SF_type_node,
4262						 endlink)));
4263
4264  /* void foo (V4SI).  */
4265  tree void_ftype_v4si
4266    = build_function_type (void_type_node,
4267			   tree_cons (NULL_TREE, V4SI_type_node,
4268				      endlink));
4269
4270  /* void foo (vint, int, void *).  */
4271  tree void_ftype_v4si_int_pvoid
4272    = build_function_type (void_type_node,
4273			   tree_cons (NULL_TREE, V4SI_type_node,
4274				      tree_cons (NULL_TREE, integer_type_node,
4275						 tree_cons (NULL_TREE,
4276							    pvoid_type_node,
4277							    endlink))));
4278
4279  /* void foo (vchar, int, void *).  */
4280  tree void_ftype_v16qi_int_pvoid
4281    = build_function_type (void_type_node,
4282			   tree_cons (NULL_TREE, V16QI_type_node,
4283				      tree_cons (NULL_TREE, integer_type_node,
4284						 tree_cons (NULL_TREE,
4285							    pvoid_type_node,
4286							    endlink))));
4287
4288  /* void foo (vshort, int, void *).  */
4289  tree void_ftype_v8hi_int_pvoid
4290    = build_function_type (void_type_node,
4291			   tree_cons (NULL_TREE, V8HI_type_node,
4292				      tree_cons (NULL_TREE, integer_type_node,
4293						 tree_cons (NULL_TREE,
4294							    pvoid_type_node,
4295							    endlink))));
4296
4297  /* void foo (char).  */
4298  tree void_ftype_qi
4299    = build_function_type (void_type_node,
4300			   tree_cons (NULL_TREE, char_type_node,
4301				      endlink));
4302
4303  /* void foo (void).  */
4304  tree void_ftype_void
4305    = build_function_type (void_type_node, void_list_node);
4306
4307  /* vshort foo (void).  */
4308  tree v8hi_ftype_void
4309    = build_function_type (V8HI_type_node, void_list_node);
4310
4311  tree v4si_ftype_v4si_v4si
4312    = build_function_type (V4SI_type_node,
4313			   tree_cons (NULL_TREE, V4SI_type_node,
4314				      tree_cons (NULL_TREE, V4SI_type_node,
4315						 endlink)));
4316
4317  /* These are for the unsigned 5 bit literals.  */
4318
4319  tree v4sf_ftype_v4si_char
4320    = build_function_type (V4SF_type_node,
4321			   tree_cons (NULL_TREE, V4SI_type_node,
4322				      tree_cons (NULL_TREE, char_type_node,
4323						 endlink)));
4324  tree v4si_ftype_v4sf_char
4325    = build_function_type (V4SI_type_node,
4326			   tree_cons (NULL_TREE, V4SF_type_node,
4327				      tree_cons (NULL_TREE, char_type_node,
4328						 endlink)));
4329  tree v4si_ftype_v4si_char
4330    = build_function_type (V4SI_type_node,
4331			   tree_cons (NULL_TREE, V4SI_type_node,
4332				      tree_cons (NULL_TREE, char_type_node,
4333						 endlink)));
4334  tree v8hi_ftype_v8hi_char
4335    = build_function_type (V8HI_type_node,
4336			   tree_cons (NULL_TREE, V8HI_type_node,
4337				      tree_cons (NULL_TREE, char_type_node,
4338						 endlink)));
4339  tree v16qi_ftype_v16qi_char
4340    = build_function_type (V16QI_type_node,
4341			   tree_cons (NULL_TREE, V16QI_type_node,
4342				      tree_cons (NULL_TREE, char_type_node,
4343						 endlink)));
4344
4345  /* These are for the unsigned 4 bit literals.  */
4346
4347  tree v16qi_ftype_v16qi_v16qi_char
4348    = build_function_type (V16QI_type_node,
4349			   tree_cons (NULL_TREE, V16QI_type_node,
4350				      tree_cons (NULL_TREE, V16QI_type_node,
4351						 tree_cons (NULL_TREE,
4352							    char_type_node,
4353							    endlink))));
4354
4355  tree v8hi_ftype_v8hi_v8hi_char
4356    = build_function_type (V8HI_type_node,
4357			   tree_cons (NULL_TREE, V8HI_type_node,
4358				      tree_cons (NULL_TREE, V8HI_type_node,
4359						 tree_cons (NULL_TREE,
4360							    char_type_node,
4361							    endlink))));
4362
4363  tree v4si_ftype_v4si_v4si_char
4364    = build_function_type (V4SI_type_node,
4365			   tree_cons (NULL_TREE, V4SI_type_node,
4366				      tree_cons (NULL_TREE, V4SI_type_node,
4367						 tree_cons (NULL_TREE,
4368							    char_type_node,
4369							    endlink))));
4370
4371  tree v4sf_ftype_v4sf_v4sf_char
4372    = build_function_type (V4SF_type_node,
4373			   tree_cons (NULL_TREE, V4SF_type_node,
4374				      tree_cons (NULL_TREE, V4SF_type_node,
4375						 tree_cons (NULL_TREE,
4376							    char_type_node,
4377							    endlink))));
4378
4379  /* End of 4 bit literals.  */
4380
4381  tree v4sf_ftype_v4sf_v4sf
4382    = build_function_type (V4SF_type_node,
4383			   tree_cons (NULL_TREE, V4SF_type_node,
4384				      tree_cons (NULL_TREE, V4SF_type_node,
4385						 endlink)));
4386  tree v4sf_ftype_v4sf_v4sf_v4si
4387    = build_function_type (V4SF_type_node,
4388			   tree_cons (NULL_TREE, V4SF_type_node,
4389				      tree_cons (NULL_TREE, V4SF_type_node,
4390						 tree_cons (NULL_TREE,
4391							    V4SI_type_node,
4392							    endlink))));
4393  tree v4sf_ftype_v4sf_v4sf_v4sf
4394    = build_function_type (V4SF_type_node,
4395			   tree_cons (NULL_TREE, V4SF_type_node,
4396				      tree_cons (NULL_TREE, V4SF_type_node,
4397						 tree_cons (NULL_TREE,
4398							    V4SF_type_node,
4399							    endlink))));
4400  tree v4si_ftype_v4si_v4si_v4si
4401    = build_function_type (V4SI_type_node,
4402			   tree_cons (NULL_TREE, V4SI_type_node,
4403				      tree_cons (NULL_TREE, V4SI_type_node,
4404						 tree_cons (NULL_TREE,
4405							    V4SI_type_node,
4406							    endlink))));
4407
4408  tree v8hi_ftype_v8hi_v8hi
4409    = build_function_type (V8HI_type_node,
4410			   tree_cons (NULL_TREE, V8HI_type_node,
4411				      tree_cons (NULL_TREE, V8HI_type_node,
4412						 endlink)));
4413  tree v8hi_ftype_v8hi_v8hi_v8hi
4414    = build_function_type (V8HI_type_node,
4415			   tree_cons (NULL_TREE, V8HI_type_node,
4416				      tree_cons (NULL_TREE, V8HI_type_node,
4417						 tree_cons (NULL_TREE,
4418							    V8HI_type_node,
4419							    endlink))));
4420 tree v4si_ftype_v8hi_v8hi_v4si
4421    = build_function_type (V4SI_type_node,
4422			   tree_cons (NULL_TREE, V8HI_type_node,
4423				      tree_cons (NULL_TREE, V8HI_type_node,
4424						 tree_cons (NULL_TREE,
4425							    V4SI_type_node,
4426							    endlink))));
4427 tree v4si_ftype_v16qi_v16qi_v4si
4428    = build_function_type (V4SI_type_node,
4429			   tree_cons (NULL_TREE, V16QI_type_node,
4430				      tree_cons (NULL_TREE, V16QI_type_node,
4431						 tree_cons (NULL_TREE,
4432							    V4SI_type_node,
4433							    endlink))));
4434
4435  tree v16qi_ftype_v16qi_v16qi
4436    = build_function_type (V16QI_type_node,
4437			   tree_cons (NULL_TREE, V16QI_type_node,
4438				      tree_cons (NULL_TREE, V16QI_type_node,
4439						 endlink)));
4440
4441  tree v4si_ftype_v4sf_v4sf
4442    = build_function_type (V4SI_type_node,
4443			   tree_cons (NULL_TREE, V4SF_type_node,
4444				      tree_cons (NULL_TREE, V4SF_type_node,
4445						 endlink)));
4446
4447  tree v4si_ftype_v4si
4448    = build_function_type (V4SI_type_node,
4449			   tree_cons (NULL_TREE, V4SI_type_node, endlink));
4450
4451  tree v8hi_ftype_v8hi
4452    = build_function_type (V8HI_type_node,
4453			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4454
4455  tree v16qi_ftype_v16qi
4456    = build_function_type (V16QI_type_node,
4457			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4458
4459  tree v8hi_ftype_v16qi_v16qi
4460    = build_function_type (V8HI_type_node,
4461			   tree_cons (NULL_TREE, V16QI_type_node,
4462				      tree_cons (NULL_TREE, V16QI_type_node,
4463						 endlink)));
4464
4465  tree v4si_ftype_v8hi_v8hi
4466    = build_function_type (V4SI_type_node,
4467			   tree_cons (NULL_TREE, V8HI_type_node,
4468				      tree_cons (NULL_TREE, V8HI_type_node,
4469						 endlink)));
4470
4471  tree v8hi_ftype_v4si_v4si
4472    = build_function_type (V8HI_type_node,
4473			   tree_cons (NULL_TREE, V4SI_type_node,
4474				      tree_cons (NULL_TREE, V4SI_type_node,
4475						 endlink)));
4476
4477  tree v16qi_ftype_v8hi_v8hi
4478    = build_function_type (V16QI_type_node,
4479			   tree_cons (NULL_TREE, V8HI_type_node,
4480				      tree_cons (NULL_TREE, V8HI_type_node,
4481						 endlink)));
4482
4483  tree v4si_ftype_v16qi_v4si
4484    = build_function_type (V4SI_type_node,
4485			   tree_cons (NULL_TREE, V16QI_type_node,
4486				      tree_cons (NULL_TREE, V4SI_type_node,
4487						 endlink)));
4488
4489  tree v4si_ftype_v16qi_v16qi
4490    = build_function_type (V4SI_type_node,
4491			   tree_cons (NULL_TREE, V16QI_type_node,
4492				      tree_cons (NULL_TREE, V16QI_type_node,
4493						 endlink)));
4494
4495  tree v4si_ftype_v8hi_v4si
4496    = build_function_type (V4SI_type_node,
4497			   tree_cons (NULL_TREE, V8HI_type_node,
4498				      tree_cons (NULL_TREE, V4SI_type_node,
4499						 endlink)));
4500
4501  tree v4si_ftype_v8hi
4502    = build_function_type (V4SI_type_node,
4503			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4504
4505  tree int_ftype_v4si_v4si
4506    = build_function_type (integer_type_node,
4507			   tree_cons (NULL_TREE, V4SI_type_node,
4508				      tree_cons (NULL_TREE, V4SI_type_node,
4509						 endlink)));
4510
4511  tree int_ftype_v4sf_v4sf
4512    = build_function_type (integer_type_node,
4513			   tree_cons (NULL_TREE, V4SF_type_node,
4514				      tree_cons (NULL_TREE, V4SF_type_node,
4515						 endlink)));
4516
4517  tree int_ftype_v16qi_v16qi
4518    = build_function_type (integer_type_node,
4519			   tree_cons (NULL_TREE, V16QI_type_node,
4520				      tree_cons (NULL_TREE, V16QI_type_node,
4521						 endlink)));
4522
4523  tree int_ftype_int_v4si_v4si
4524    = build_function_type
4525    (integer_type_node,
4526     tree_cons (NULL_TREE, integer_type_node,
4527		tree_cons (NULL_TREE, V4SI_type_node,
4528			   tree_cons (NULL_TREE, V4SI_type_node,
4529				      endlink))));
4530
4531  tree int_ftype_int_v4sf_v4sf
4532    = build_function_type
4533    (integer_type_node,
4534     tree_cons (NULL_TREE, integer_type_node,
4535		tree_cons (NULL_TREE, V4SF_type_node,
4536			   tree_cons (NULL_TREE, V4SF_type_node,
4537				      endlink))));
4538
4539  tree int_ftype_int_v8hi_v8hi
4540    = build_function_type
4541    (integer_type_node,
4542     tree_cons (NULL_TREE, integer_type_node,
4543		 tree_cons (NULL_TREE, V8HI_type_node,
4544			    tree_cons (NULL_TREE, V8HI_type_node,
4545				       endlink))));
4546
4547  tree int_ftype_int_v16qi_v16qi
4548    = build_function_type
4549    (integer_type_node,
4550     tree_cons (NULL_TREE, integer_type_node,
4551		tree_cons (NULL_TREE, V16QI_type_node,
4552			   tree_cons (NULL_TREE, V16QI_type_node,
4553				      endlink))));
4554
4555  tree v16qi_ftype_int_pvoid
4556    = build_function_type (V16QI_type_node,
4557			   tree_cons (NULL_TREE, integer_type_node,
4558				      tree_cons (NULL_TREE, pvoid_type_node,
4559						 endlink)));
4560
4561  tree v4si_ftype_int_pvoid
4562    = build_function_type (V4SI_type_node,
4563			   tree_cons (NULL_TREE, integer_type_node,
4564				      tree_cons (NULL_TREE, pvoid_type_node,
4565						 endlink)));
4566
4567  tree v8hi_ftype_int_pvoid
4568    = build_function_type (V8HI_type_node,
4569			   tree_cons (NULL_TREE, integer_type_node,
4570				      tree_cons (NULL_TREE, pvoid_type_node,
4571						 endlink)));
4572
4573  tree int_ftype_v8hi_v8hi
4574    = build_function_type (integer_type_node,
4575			   tree_cons (NULL_TREE, V8HI_type_node,
4576				      tree_cons (NULL_TREE, V8HI_type_node,
4577						 endlink)));
4578
4579  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4580  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4581  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4582  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4583  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4584  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4585  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4586  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4587  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4588  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4589  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4590  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4591  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4592  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4593  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4594  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4595  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4596  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4597  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4598  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4599  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4600  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4601  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4602  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4603
4604  /* Add the simple ternary operators.  */
4605  d = (struct builtin_description *) bdesc_3arg;
4606  for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4607    {
4608
4609      enum machine_mode mode0, mode1, mode2, mode3;
4610      tree type;
4611
4612      if (d->name == 0)
4613	continue;
4614
4615      mode0 = insn_data[d->icode].operand[0].mode;
4616      mode1 = insn_data[d->icode].operand[1].mode;
4617      mode2 = insn_data[d->icode].operand[2].mode;
4618      mode3 = insn_data[d->icode].operand[3].mode;
4619
4620      /* When all four are of the same mode.  */
4621      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4622	{
4623	  switch (mode0)
4624	    {
4625	    case V4SImode:
4626	      type = v4si_ftype_v4si_v4si_v4si;
4627	      break;
4628	    case V4SFmode:
4629	      type = v4sf_ftype_v4sf_v4sf_v4sf;
4630	      break;
4631	    case V8HImode:
4632	      type = v8hi_ftype_v8hi_v8hi_v8hi;
4633	      break;
4634	    case V16QImode:
4635	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4636	      break;
4637	    default:
4638	      abort();
4639	    }
4640	}
4641      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4642        {
4643	  switch (mode0)
4644	    {
4645	    case V4SImode:
4646	      type = v4si_ftype_v4si_v4si_v16qi;
4647	      break;
4648	    case V4SFmode:
4649	      type = v4sf_ftype_v4sf_v4sf_v16qi;
4650	      break;
4651	    case V8HImode:
4652	      type = v8hi_ftype_v8hi_v8hi_v16qi;
4653	      break;
4654	    case V16QImode:
4655	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4656	      break;
4657	    default:
4658	      abort();
4659	    }
4660	}
4661      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4662	       && mode3 == V4SImode)
4663	type = v4si_ftype_v16qi_v16qi_v4si;
4664      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4665	       && mode3 == V4SImode)
4666	type = v4si_ftype_v8hi_v8hi_v4si;
4667      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4668	       && mode3 == V4SImode)
4669	type = v4sf_ftype_v4sf_v4sf_v4si;
4670
4671      /* vchar, vchar, vchar, 4 bit literal.  */
4672      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4673	       && mode3 == QImode)
4674	type = v16qi_ftype_v16qi_v16qi_char;
4675
4676      /* vshort, vshort, vshort, 4 bit literal.  */
4677      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4678	       && mode3 == QImode)
4679	type = v8hi_ftype_v8hi_v8hi_char;
4680
4681      /* vint, vint, vint, 4 bit literal.  */
4682      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4683	       && mode3 == QImode)
4684	type = v4si_ftype_v4si_v4si_char;
4685
4686      /* vfloat, vfloat, vfloat, 4 bit literal.  */
4687      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4688	       && mode3 == QImode)
4689	type = v4sf_ftype_v4sf_v4sf_char;
4690
4691      else
4692	abort ();
4693
4694      def_builtin (d->mask, d->name, type, d->code);
4695    }
4696
4697  /* Add the DST variants.  */
4698  d = (struct builtin_description *) bdesc_dst;
4699  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4700    def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4701
4702  /* Initialize the predicates.  */
4703  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4704  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4705    {
4706      enum machine_mode mode1;
4707      tree type;
4708
4709      mode1 = insn_data[dp->icode].operand[1].mode;
4710
4711      switch (mode1)
4712	{
4713	case V4SImode:
4714	  type = int_ftype_int_v4si_v4si;
4715	  break;
4716	case V8HImode:
4717	  type = int_ftype_int_v8hi_v8hi;
4718	  break;
4719	case V16QImode:
4720	  type = int_ftype_int_v16qi_v16qi;
4721	  break;
4722	case V4SFmode:
4723	  type = int_ftype_int_v4sf_v4sf;
4724	  break;
4725	default:
4726	  abort ();
4727	}
4728
4729      def_builtin (dp->mask, dp->name, type, dp->code);
4730    }
4731
4732  /* Add the simple binary operators.  */
4733  d = (struct builtin_description *) bdesc_2arg;
4734  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4735    {
4736      enum machine_mode mode0, mode1, mode2;
4737      tree type;
4738
4739      if (d->name == 0)
4740	continue;
4741
4742      mode0 = insn_data[d->icode].operand[0].mode;
4743      mode1 = insn_data[d->icode].operand[1].mode;
4744      mode2 = insn_data[d->icode].operand[2].mode;
4745
4746      /* When all three operands are of the same mode.  */
4747      if (mode0 == mode1 && mode1 == mode2)
4748	{
4749	  switch (mode0)
4750	    {
4751	    case V4SFmode:
4752	      type = v4sf_ftype_v4sf_v4sf;
4753	      break;
4754	    case V4SImode:
4755	      type = v4si_ftype_v4si_v4si;
4756	      break;
4757	    case V16QImode:
4758	      type = v16qi_ftype_v16qi_v16qi;
4759	      break;
4760	    case V8HImode:
4761	      type = v8hi_ftype_v8hi_v8hi;
4762	      break;
4763	    default:
4764	      abort ();
4765	    }
4766	}
4767
4768      /* A few other combos we really don't want to do manually.  */
4769
4770      /* vint, vfloat, vfloat.  */
4771      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4772	type = v4si_ftype_v4sf_v4sf;
4773
4774      /* vshort, vchar, vchar.  */
4775      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4776	type = v8hi_ftype_v16qi_v16qi;
4777
4778      /* vint, vshort, vshort.  */
4779      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4780	type = v4si_ftype_v8hi_v8hi;
4781
4782      /* vshort, vint, vint.  */
4783      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4784	type = v8hi_ftype_v4si_v4si;
4785
4786      /* vchar, vshort, vshort.  */
4787      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4788	type = v16qi_ftype_v8hi_v8hi;
4789
4790      /* vint, vchar, vint.  */
4791      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4792	type = v4si_ftype_v16qi_v4si;
4793
4794      /* vint, vchar, vchar.  */
4795      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4796	type = v4si_ftype_v16qi_v16qi;
4797
4798      /* vint, vshort, vint.  */
4799      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4800	type = v4si_ftype_v8hi_v4si;
4801
4802      /* vint, vint, 5 bit literal.  */
4803      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4804	type = v4si_ftype_v4si_char;
4805
4806      /* vshort, vshort, 5 bit literal.  */
4807      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4808	type = v8hi_ftype_v8hi_char;
4809
4810      /* vchar, vchar, 5 bit literal.  */
4811      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4812	type = v16qi_ftype_v16qi_char;
4813
4814      /* vfloat, vint, 5 bit literal.  */
4815      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4816	type = v4sf_ftype_v4si_char;
4817
4818      /* vint, vfloat, 5 bit literal.  */
4819      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4820	type = v4si_ftype_v4sf_char;
4821
4822      /* int, x, x.  */
4823      else if (mode0 == SImode)
4824	{
4825	  switch (mode1)
4826	    {
4827	    case V4SImode:
4828	      type = int_ftype_v4si_v4si;
4829	      break;
4830	    case V4SFmode:
4831	      type = int_ftype_v4sf_v4sf;
4832	      break;
4833	    case V16QImode:
4834	      type = int_ftype_v16qi_v16qi;
4835	      break;
4836	    case V8HImode:
4837	      type = int_ftype_v8hi_v8hi;
4838	      break;
4839	    default:
4840	      abort ();
4841	    }
4842	}
4843
4844      else
4845	abort ();
4846
4847      def_builtin (d->mask, d->name, type, d->code);
4848    }
4849
4850  /* Initialize the abs* operators.  */
4851  d = (struct builtin_description *) bdesc_abs;
4852  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4853    {
4854      enum machine_mode mode0;
4855      tree type;
4856
4857      mode0 = insn_data[d->icode].operand[0].mode;
4858
4859      switch (mode0)
4860	{
4861	case V4SImode:
4862	  type = v4si_ftype_v4si;
4863	  break;
4864	case V8HImode:
4865	  type = v8hi_ftype_v8hi;
4866	  break;
4867	case V16QImode:
4868	  type = v16qi_ftype_v16qi;
4869	  break;
4870	case V4SFmode:
4871	  type = v4sf_ftype_v4sf;
4872	  break;
4873	default:
4874	  abort ();
4875	}
4876
4877      def_builtin (d->mask, d->name, type, d->code);
4878    }
4879
4880  /* Add the simple unary operators.  */
4881  d = (struct builtin_description *) bdesc_1arg;
4882  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4883    {
4884      enum machine_mode mode0, mode1;
4885      tree type;
4886
4887      if (d->name == 0)
4888	continue;
4889
4890      mode0 = insn_data[d->icode].operand[0].mode;
4891      mode1 = insn_data[d->icode].operand[1].mode;
4892
4893      if (mode0 == V4SImode && mode1 == QImode)
4894        type = v4si_ftype_char;
4895      else if (mode0 == V8HImode && mode1 == QImode)
4896        type = v8hi_ftype_char;
4897      else if (mode0 == V16QImode && mode1 == QImode)
4898        type = v16qi_ftype_char;
4899      else if (mode0 == V4SFmode && mode1 == V4SFmode)
4900	type = v4sf_ftype_v4sf;
4901      else if (mode0 == V8HImode && mode1 == V16QImode)
4902	type = v8hi_ftype_v16qi;
4903      else if (mode0 == V4SImode && mode1 == V8HImode)
4904	type = v4si_ftype_v8hi;
4905      else
4906	abort ();
4907
4908      def_builtin (d->mask, d->name, type, d->code);
4909    }
4910}
4911
4912
4913/* Expand a block move operation, and return 1 if successful.  Return 0
4914   if we should let the compiler generate normal code.
4915
4916   operands[0] is the destination
4917   operands[1] is the source
4918   operands[2] is the length
4919   operands[3] is the alignment */
4920
4921#define MAX_MOVE_REG 4
4922
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Emit the move as a sequence of chunks, each either a string-move
	 (lswi/stswi via a movstrsi_* pattern) or a plain register-sized
	 load/store pair, largest chunks first.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Exactly one member of this union is set per iteration; which
	     one is indicated by MODE staying BLKmode (movstrsi) or being
	     set to a scalar mode (mov).  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register string moves clobber hard regs r5..r12, so
	     they can only be used when those registers are available.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar-mode chunk: bounce through a fresh pseudo.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Emit up to MAX_MOVE_REG loads before their matching stores, so
	 the loads can overlap in the pipeline before any store blocks
	 on them.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the batch of pending stores when the buffer fills.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Emit any stores still pending.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
5136
5137
5138/* Return 1 if OP is a load multiple operation.  It is known to be a
5139   PARALLEL and the first section will be tested.  */
5140
5141int
5142load_multiple_operation (op, mode)
5143     rtx op;
5144     enum machine_mode mode ATTRIBUTE_UNUSED;
5145{
5146  int count = XVECLEN (op, 0);
5147  unsigned int dest_regno;
5148  rtx src_addr;
5149  int i;
5150
5151  /* Perform a quick check so we don't blow up below.  */
5152  if (count <= 1
5153      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5154      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5155      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5156    return 0;
5157
5158  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5159  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5160
5161  for (i = 1; i < count; i++)
5162    {
5163      rtx elt = XVECEXP (op, 0, i);
5164
5165      if (GET_CODE (elt) != SET
5166	  || GET_CODE (SET_DEST (elt)) != REG
5167	  || GET_MODE (SET_DEST (elt)) != SImode
5168	  || REGNO (SET_DEST (elt)) != dest_regno + i
5169	  || GET_CODE (SET_SRC (elt)) != MEM
5170	  || GET_MODE (SET_SRC (elt)) != SImode
5171	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5172	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5173	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5174	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5175	return 0;
5176    }
5177
5178  return 1;
5179}
5180
5181/* Similar, but tests for store multiple.  Here, the second vector element
5182   is a CLOBBER.  It will be tested later.  */
5183
5184int
5185store_multiple_operation (op, mode)
5186     rtx op;
5187     enum machine_mode mode ATTRIBUTE_UNUSED;
5188{
5189  int count = XVECLEN (op, 0) - 1;
5190  unsigned int src_regno;
5191  rtx dest_addr;
5192  int i;
5193
5194  /* Perform a quick check so we don't blow up below.  */
5195  if (count <= 1
5196      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5197      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5198      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5199    return 0;
5200
5201  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5202  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5203
5204  for (i = 1; i < count; i++)
5205    {
5206      rtx elt = XVECEXP (op, 0, i + 1);
5207
5208      if (GET_CODE (elt) != SET
5209	  || GET_CODE (SET_SRC (elt)) != REG
5210	  || GET_MODE (SET_SRC (elt)) != SImode
5211	  || REGNO (SET_SRC (elt)) != src_regno + i
5212	  || GET_CODE (SET_DEST (elt)) != MEM
5213	  || GET_MODE (SET_DEST (elt)) != SImode
5214	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5215	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5216	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5217	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5218	return 0;
5219    }
5220
5221  return 1;
5222}
5223
5224/* Return a string to perform a load_multiple operation.
5225   operands[0] is the vector.
5226   operands[1] is the source address.
5227   operands[2] is the first destination register.  */
5228
const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* Single word: a plain load, no overlap problem possible.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for a destination register that the address expression refers
     to; if none is found, the plain lswi at the end suffices.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address register is the LAST destination: load all the
	       earlier words with lswi, then overwrite the address register
	       with its own word as the final load.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address register is the FIRST destination: advance the
	       base by 4, lswi the remaining words starting at the second
	       destination register, then load the first word (now at
	       offset -4) into the address register last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address register is somewhere in the MIDDLE: give up on
	       lswi and emit one lwz per word, saving the word that targets
	       the address register for last so the base stays valid.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a single load-string-immediate does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
5281
5282/* Return 1 for a parallel vrsave operation.  */
5283
5284int
5285vrsave_operation (op, mode)
5286     rtx op;
5287     enum machine_mode mode ATTRIBUTE_UNUSED;
5288{
5289  int count = XVECLEN (op, 0);
5290  unsigned int dest_regno, src_regno;
5291  int i;
5292
5293  if (count <= 1
5294      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5295      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5296      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5297    return 0;
5298
5299  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5300  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5301
5302  if (dest_regno != VRSAVE_REGNO
5303      && src_regno != VRSAVE_REGNO)
5304    return 0;
5305
5306  for (i = 1; i < count; i++)
5307    {
5308      rtx elt = XVECEXP (op, 0, i);
5309
5310      if (GET_CODE (elt) != CLOBBER
5311	  && GET_CODE (elt) != SET)
5312	return 0;
5313    }
5314
5315  return 1;
5316}
5317
5318/* Return 1 for an PARALLEL suitable for mtcrf.  */
5319
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  /* The GPR supplying the bits that mtcrf copies into the CR fields;
     every element of the PARALLEL must name this same register.  */
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  /* Each element must set one CR field from an UNSPEC of SRC_REG and the
     single mask bit selecting that field.  */
  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The FXM-style mask bit for this CR field: bit 7 selects CR0,
	 bit 0 selects CR7.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 looks like the unspec number assigned to the
	 mtcrf pattern in the machine description — confirm against
	 rs6000.md before changing.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
5366
/* Return 1 for an PARALLEL suitable for lmw.  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads registers dest_regno..31, so the number of elements
     must match exactly.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register plus constant
     offset; every later element must use the same base.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      /* Base register 0 is not usable here.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element i must load SImode register dest_regno + i from address
     offset + 4 * i off the common base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5444
5445/* Return 1 for an PARALLEL suitable for stmw.  */
5446
5447int
5448stmw_operation (op, mode)
5449     rtx op;
5450     enum machine_mode mode ATTRIBUTE_UNUSED;
5451{
5452  int count = XVECLEN (op, 0);
5453  unsigned int src_regno;
5454  rtx dest_addr;
5455  unsigned int base_regno;
5456  HOST_WIDE_INT offset;
5457  int i;
5458
5459  /* Perform a quick check so we don't blow up below.  */
5460  if (count <= 1
5461      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5462      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5463      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5464    return 0;
5465
5466  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5467  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5468
5469  if (src_regno > 31
5470      || count != 32 - (int) src_regno)
5471    return 0;
5472
5473  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5474    {
5475      offset = 0;
5476      base_regno = REGNO (dest_addr);
5477      if (base_regno == 0)
5478	return 0;
5479    }
5480  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5481    {
5482      offset = INTVAL (XEXP (dest_addr, 1));
5483      base_regno = REGNO (XEXP (dest_addr, 0));
5484    }
5485  else
5486    return 0;
5487
5488  for (i = 0; i < count; i++)
5489    {
5490      rtx elt = XVECEXP (op, 0, i);
5491      rtx newaddr;
5492      rtx addr_reg;
5493      HOST_WIDE_INT newoffset;
5494
5495      if (GET_CODE (elt) != SET
5496	  || GET_CODE (SET_SRC (elt)) != REG
5497	  || GET_MODE (SET_SRC (elt)) != SImode
5498	  || REGNO (SET_SRC (elt)) != src_regno + i
5499	  || GET_CODE (SET_DEST (elt)) != MEM
5500	  || GET_MODE (SET_DEST (elt)) != SImode)
5501	return 0;
5502      newaddr = XEXP (SET_DEST (elt), 0);
5503      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5504	{
5505	  newoffset = 0;
5506	  addr_reg = newaddr;
5507	}
5508      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5509	{
5510	  addr_reg = XEXP (newaddr, 0);
5511	  newoffset = INTVAL (XEXP (newaddr, 1));
5512	}
5513      else
5514	return 0;
5515      if (REGNO (addr_reg) != base_regno
5516	  || newoffset != offset + 4 * i)
5517	return 0;
5518    }
5519
5520  return 1;
5521}
5522
5523/* A validation routine: say whether CODE, a condition code, and MODE
5524   match.  The other alternatives either don't make sense or should
5525   never be generated.  */
5526
5527static void
5528validate_condition_mode (code, mode)
5529     enum rtx_code code;
5530     enum machine_mode mode;
5531{
5532  if (GET_RTX_CLASS (code) != '<'
5533      || GET_MODE_CLASS (mode) != MODE_CC)
5534    abort ();
5535
5536  /* These don't make sense.  */
5537  if ((code == GT || code == LT || code == GE || code == LE)
5538      && mode == CCUNSmode)
5539    abort ();
5540
5541  if ((code == GTU || code == LTU || code == GEU || code == LEU)
5542      && mode != CCUNSmode)
5543    abort ();
5544
5545  if (mode != CCFPmode
5546      && (code == ORDERED || code == UNORDERED
5547	  || code == UNEQ || code == LTGT
5548	  || code == UNGT || code == UNLT
5549	  || code == UNGE || code == UNLE))
5550    abort ();
5551
5552  /* These should never be generated except for
5553     flag_unsafe_math_optimizations.  */
5554  if (mode == CCFPmode
5555      && ! flag_unsafe_math_optimizations
5556      && (code == LE || code == GE
5557	  || code == UNEQ || code == LTGT
5558	  || code == UNGT || code == UNLT))
5559    abort ();
5560
5561  /* These are invalid; the information is not there.  */
5562  if (mode == CCEQmode
5563      && code != EQ && code != NE)
5564    abort ();
5565}
5566
5567/* Return 1 if OP is a comparison operation that is valid for a branch insn.
5568   We only check the opcode against the mode of the CC value here.  */
5569
5570int
5571branch_comparison_operator (op, mode)
5572     rtx op;
5573     enum machine_mode mode ATTRIBUTE_UNUSED;
5574{
5575  enum rtx_code code = GET_CODE (op);
5576  enum machine_mode cc_mode;
5577
5578  if (GET_RTX_CLASS (code) != '<')
5579    return 0;
5580
5581  cc_mode = GET_MODE (XEXP (op, 0));
5582  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5583    return 0;
5584
5585  validate_condition_mode (code, cc_mode);
5586
5587  return 1;
5588}
5589
5590/* Return 1 if OP is a comparison operation that is valid for a branch
5591   insn and which is true if the corresponding bit in the CC register
5592   is set.  */
5593
5594int
5595branch_positive_comparison_operator (op, mode)
5596     rtx op;
5597     enum machine_mode mode;
5598{
5599  enum rtx_code code;
5600
5601  if (! branch_comparison_operator (op, mode))
5602    return 0;
5603
5604  code = GET_CODE (op);
5605  return (code == EQ || code == LT || code == GT
5606	  || code == LTU || code == GTU
5607	  || code == UNORDERED);
5608}
5609
5610/* Return 1 if OP is a comparison operation that is valid for an scc insn.
5611   We check the opcode against the mode of the CC value and disallow EQ or
5612   NE comparisons for integers.  */
5613
5614int
5615scc_comparison_operator (op, mode)
5616     rtx op;
5617     enum machine_mode mode;
5618{
5619  enum rtx_code code = GET_CODE (op);
5620  enum machine_mode cc_mode;
5621
5622  if (GET_MODE (op) != mode && mode != VOIDmode)
5623    return 0;
5624
5625  if (GET_RTX_CLASS (code) != '<')
5626    return 0;
5627
5628  cc_mode = GET_MODE (XEXP (op, 0));
5629  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5630    return 0;
5631
5632  validate_condition_mode (code, cc_mode);
5633
5634  if (code == NE && cc_mode != CCFPmode)
5635    return 0;
5636
5637  return 1;
5638}
5639
5640int
5641trap_comparison_operator (op, mode)
5642    rtx op;
5643    enum machine_mode mode;
5644{
5645  if (mode != VOIDmode && mode != GET_MODE (op))
5646    return 0;
5647  return GET_RTX_CLASS (GET_CODE (op)) == '<';
5648}
5649
5650int
5651boolean_operator (op, mode)
5652    rtx op;
5653    enum machine_mode mode ATTRIBUTE_UNUSED;
5654{
5655  enum rtx_code code = GET_CODE (op);
5656  return (code == AND || code == IOR || code == XOR);
5657}
5658
5659int
5660boolean_or_operator (op, mode)
5661    rtx op;
5662    enum machine_mode mode ATTRIBUTE_UNUSED;
5663{
5664  enum rtx_code code = GET_CODE (op);
5665  return (code == IOR || code == XOR);
5666}
5667
5668int
5669min_max_operator (op, mode)
5670    rtx op;
5671    enum machine_mode mode ATTRIBUTE_UNUSED;
5672{
5673  enum rtx_code code = GET_CODE (op);
5674  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5675}
5676
5677/* Return 1 if ANDOP is a mask that has no bits on that are not in the
5678   mask required to convert the result of a rotate insn into a shift
5679   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
5680
5681int
5682includes_lshift_p (shiftop, andop)
5683     rtx shiftop;
5684     rtx andop;
5685{
5686  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5687
5688  shift_mask <<= INTVAL (shiftop);
5689
5690  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5691}
5692
5693/* Similar, but for right shift.  */
5694
5695int
5696includes_rshift_p (shiftop, andop)
5697     rtx shiftop;
5698     rtx andop;
5699{
5700  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5701
5702  shift_mask >>= INTVAL (shiftop);
5703
5704  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5705}
5706
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   signifigant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* An all-zero or all-one value can never have the required
	 0...01...10...0 shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a narrow (32-bit HOST_WIDE_INT) host a 64-bit mask arrives
	 as a CONST_DOUBLE; its low and high words are checked
	 separately.  HIGH is only assigned and only read when
	 HOST_BITS_PER_WIDE_INT < 64.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject the all-zero and all-one masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      /* If the low word is all zeros, both mask transitions lie in the
	 high word, and the shift must reach past bit 32.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  /* Strip the low run of ones and verify nothing but a single
	     higher run of ones remains.  */
	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the lowest set bit is in the low word and must line
	 up with the LSB of the shift mask.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both words and strip the low run of (originally) ones,
	 then verify a single remaining run.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5801
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   signifigant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a narrow (32-bit HOST_WIDE_INT) host a 64-bit mask arrives
	 as a CONST_DOUBLE split into low and high words.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  /* If the low word is all zeros, the 0->1 transition must be
	     in the high word, checked against the upper half of the
	     shift mask.  */
	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }

	  /* Otherwise the transition is in the low word, so the whole
	     high word must already be ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      /* The lowest set bit must be covered by the shift mask (this
	 also rejects low == 0).  */
      if ((lsb & shift_mask_low) == 0)
	return 0;

      /* All ones above the transition, and not the all-ones mask.  */
      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5873
5874/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5875   for lfq and stfq insns.
5876
5877   Note reg1 and reg2 *must* be hard registers.  To be sure we will
5878   abort if we are passed pseudo registers.  */
5879
5880int
5881registers_ok_for_quad_peep (reg1, reg2)
5882     rtx reg1, reg2;
5883{
5884  /* We might have been passed a SUBREG.  */
5885  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5886    return 0;
5887
5888  return (REGNO (reg1) == REGNO (reg2) - 1);
5889}
5890
5891/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5892   addr1 and addr2 must be in consecutive memory locations
5893   (addr2 == addr1 + 8).  */
5894
5895int
5896addrs_ok_for_quad_peep (addr1, addr2)
5897     rtx addr1;
5898     rtx addr2;
5899{
5900  unsigned int reg1;
5901  int offset1;
5902
5903  /* Extract an offset (if used) from the first addr.  */
5904  if (GET_CODE (addr1) == PLUS)
5905    {
5906      /* If not a REG, return zero.  */
5907      if (GET_CODE (XEXP (addr1, 0)) != REG)
5908	return 0;
5909      else
5910	{
5911          reg1 = REGNO (XEXP (addr1, 0));
5912	  /* The offset must be constant!  */
5913	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5914            return 0;
5915          offset1 = INTVAL (XEXP (addr1, 1));
5916	}
5917    }
5918  else if (GET_CODE (addr1) != REG)
5919    return 0;
5920  else
5921    {
5922      reg1 = REGNO (addr1);
5923      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
5924      offset1 = 0;
5925    }
5926
5927  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
5928  if (GET_CODE (addr2) != PLUS)
5929    return 0;
5930
5931  if (GET_CODE (XEXP (addr2, 0)) != REG
5932      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5933    return 0;
5934
5935  if (reg1 != REGNO (XEXP (addr2, 0)))
5936    return 0;
5937
5938  /* The offset for the second addr must be 8 more than the first addr.  */
5939  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5940    return 0;
5941
5942  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
5943     instructions.  */
5944  return 1;
5945}
5946
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 if it is not (or does
     not resolve through true_regnum to) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    /* Constants, memory, etc.  */
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
6019
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* Only comparison codes have a CCR bit to test.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison's operand must be a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive CCR bits.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* Within the field: bit 0 tests LT-ish codes, bit 1 GT-ish codes,
     bit 2 EQ, bit 3 unordered (and the scc cror/crnor result).  */
  switch (code)
    {
    case NE:
      /* For scc, %D will have emitted a crnor placing the inverted
	 EQ result in bit 3.  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      /* Similarly, ! GT unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
6080
6081/* Return the GOT register.  */
6082
6083struct rtx_def *
6084rs6000_got_register (value)
6085     rtx value ATTRIBUTE_UNUSED;
6086{
6087  /* The second flow pass currently (June 1999) can't update
6088     regs_ever_live without disturbing other parts of the compiler, so
6089     update it here to make the prolog/epilogue code happy.  */
6090  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6091    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6092
6093  current_function_uses_pic_offset_table = 1;
6094
6095  return pic_offset_table_rtx;
6096}
6097
/* Functions to init, mark and free struct machine_function.
   These will be called, via pointer variables,
   from push_function_context and pop_function_context.  */

static void
rs6000_init_machine_status (p)
     struct function *p;
{
  /* Allocate P's machine-specific data, zero-initialized; it is
     released by rs6000_free_machine_status.  */
  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
}
6108
6109static void
6110rs6000_free_machine_status (p)
6111     struct function *p;
6112{
6113  if (p->machine == NULL)
6114    return;
6115
6116  free (p->machine);
6117  p->machine = NULL;
6118}
6119
6120
6121/* Print an operand.  Recognize special options, documented below.  */
6122
6123#if TARGET_ELF
6124#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6125#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6126#else
6127#define SMALL_DATA_RELOC "sda21"
6128#define SMALL_DATA_REG 0
6129#endif
6130
6131void
6132print_operand (file, x, code)
6133    FILE *file;
6134    rtx x;
6135    int code;
6136{
6137  int i;
6138  HOST_WIDE_INT val;
6139
6140  /* These macros test for integers and extract the low-order bits.  */
6141#define INT_P(X)  \
6142((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
6143 && GET_MODE (X) == VOIDmode)
6144
6145#define INT_LOWPART(X) \
6146  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6147
6148  switch (code)
6149    {
6150    case '.':
6151      /* Write out an instruction after the call which may be replaced
6152	 with glue code by the loader.  This depends on the AIX version.  */
6153      asm_fprintf (file, RS6000_CALL_GLUE);
6154      return;
6155
6156      /* %a is output_address.  */
6157
6158    case 'A':
6159      /* If X is a constant integer whose low-order 5 bits are zero,
6160	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
6161	 in the AIX assembler where "sri" with a zero shift count
6162	 writes a trash instruction.  */
6163      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6164	putc ('l', file);
6165      else
6166	putc ('r', file);
6167      return;
6168
6169    case 'b':
6170      /* If constant, low-order 16 bits of constant, unsigned.
6171	 Otherwise, write normally.  */
6172      if (INT_P (x))
6173	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6174      else
6175	print_operand (file, x, 0);
6176      return;
6177
6178    case 'B':
6179      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6180	 for 64-bit mask direction.  */
6181      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6182      return;
6183
6184      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6185	 output_operand.  */
6186
6187    case 'D':
6188      /* There used to be a comment for 'C' reading "This is an
6189	   optional cror needed for certain floating-point
6190	   comparisons.  Otherwise write nothing."  */
6191
6192      /* Similar, except that this is for an scc, so we must be able to
6193	 encode the test in a single bit that is one.  We do the above
6194	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
6195      if (GET_CODE (x) == LE || GET_CODE (x) == GE
6196	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6197	{
6198	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6199
6200	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6201		   base_bit + 2,
6202		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6203	}
6204
6205      else if (GET_CODE (x) == NE)
6206	{
6207	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6208
6209	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6210		   base_bit + 2, base_bit + 2);
6211	}
6212      return;
6213
6214    case 'E':
6215      /* X is a CR register.  Print the number of the EQ bit of the CR */
6216      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6217	output_operand_lossage ("invalid %%E value");
6218      else
6219	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6220      return;
6221
6222    case 'f':
6223      /* X is a CR register.  Print the shift count needed to move it
6224	 to the high-order four bits.  */
6225      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6226	output_operand_lossage ("invalid %%f value");
6227      else
6228	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6229      return;
6230
6231    case 'F':
6232      /* Similar, but print the count for the rotate in the opposite
6233	 direction.  */
6234      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6235	output_operand_lossage ("invalid %%F value");
6236      else
6237	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6238      return;
6239
6240    case 'G':
6241      /* X is a constant integer.  If it is negative, print "m",
6242	 otherwise print "z".  This is to make a aze or ame insn.  */
6243      if (GET_CODE (x) != CONST_INT)
6244	output_operand_lossage ("invalid %%G value");
6245      else if (INTVAL (x) >= 0)
6246	putc ('z', file);
6247      else
6248	putc ('m', file);
6249      return;
6250
6251    case 'h':
6252      /* If constant, output low-order five bits.  Otherwise, write
6253	 normally.  */
6254      if (INT_P (x))
6255	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6256      else
6257	print_operand (file, x, 0);
6258      return;
6259
6260    case 'H':
6261      /* If constant, output low-order six bits.  Otherwise, write
6262	 normally.  */
6263      if (INT_P (x))
6264	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6265      else
6266	print_operand (file, x, 0);
6267      return;
6268
6269    case 'I':
6270      /* Print `i' if this is a constant, else nothing.  */
6271      if (INT_P (x))
6272	putc ('i', file);
6273      return;
6274
6275    case 'j':
6276      /* Write the bit number in CCR for jump.  */
6277      i = ccr_bit (x, 0);
6278      if (i == -1)
6279	output_operand_lossage ("invalid %%j code");
6280      else
6281	fprintf (file, "%d", i);
6282      return;
6283
6284    case 'J':
6285      /* Similar, but add one for shift count in rlinm for scc and pass
6286	 scc flag to `ccr_bit'.  */
6287      i = ccr_bit (x, 1);
6288      if (i == -1)
6289	output_operand_lossage ("invalid %%J code");
6290      else
6291	/* If we want bit 31, write a shift count of zero, not 32.  */
6292	fprintf (file, "%d", i == 31 ? 0 : i + 1);
6293      return;
6294
6295    case 'k':
6296      /* X must be a constant.  Write the 1's complement of the
6297	 constant.  */
6298      if (! INT_P (x))
6299	output_operand_lossage ("invalid %%k value");
6300      else
6301	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6302      return;
6303
6304    case 'K':
6305      /* X must be a symbolic constant on ELF.  Write an
6306	 expression suitable for an 'addi' that adds in the low 16
6307	 bits of the MEM.  */
6308      if (GET_CODE (x) != CONST)
6309	{
6310	  print_operand_address (file, x);
6311	  fputs ("@l", file);
6312	}
6313      else
6314	{
6315	  if (GET_CODE (XEXP (x, 0)) != PLUS
6316	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6317		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6318	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6319	    output_operand_lossage ("invalid %%K value");
6320	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
6321	  fputs ("@l", file);
6322	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6323	}
6324      return;
6325
6326      /* %l is output_asm_label.  */
6327
6328    case 'L':
6329      /* Write second word of DImode or DFmode reference.  Works on register
6330	 or non-indexed memory only.  */
6331      if (GET_CODE (x) == REG)
6332	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6333      else if (GET_CODE (x) == MEM)
6334	{
6335	  /* Handle possible auto-increment.  Since it is pre-increment and
6336	     we have already done it, we can just use an offset of word.  */
6337	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6338	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6339	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6340					   UNITS_PER_WORD));
6341	  else
6342	    output_address (XEXP (adjust_address_nv (x, SImode,
6343						     UNITS_PER_WORD),
6344				  0));
6345
6346	  if (small_data_operand (x, GET_MODE (x)))
6347	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6348		     reg_names[SMALL_DATA_REG]);
6349	}
6350      return;
6351
6352    case 'm':
6353      /* MB value for a mask operand.  */
6354      if (! mask_operand (x, SImode))
6355	output_operand_lossage ("invalid %%m value");
6356
6357      val = INT_LOWPART (x);
6358
6359      /* If the high bit is set and the low bit is not, the value is zero.
6360	 If the high bit is zero, the value is the first 1 bit we find from
6361	 the left.  */
6362      if ((val & 0x80000000) && ((val & 1) == 0))
6363	{
6364	  putc ('0', file);
6365	  return;
6366	}
6367      else if ((val & 0x80000000) == 0)
6368	{
6369	  for (i = 1; i < 32; i++)
6370	    if ((val <<= 1) & 0x80000000)
6371	      break;
6372	  fprintf (file, "%d", i);
6373	  return;
6374	}
6375
6376      /* Otherwise, look for the first 0 bit from the right.  The result is its
6377	 number plus 1. We know the low-order bit is one.  */
6378      for (i = 0; i < 32; i++)
6379	if (((val >>= 1) & 1) == 0)
6380	  break;
6381
6382      /* If we ended in ...01, i would be 0.  The correct value is 31, so
6383	 we want 31 - i.  */
6384      fprintf (file, "%d", 31 - i);
6385      return;
6386
6387    case 'M':
6388      /* ME value for a mask operand.  */
6389      if (! mask_operand (x, SImode))
6390	output_operand_lossage ("invalid %%M value");
6391
6392      val = INT_LOWPART (x);
6393
6394      /* If the low bit is set and the high bit is not, the value is 31.
6395	 If the low bit is zero, the value is the first 1 bit we find from
6396	 the right.  */
6397      if ((val & 1) && ((val & 0x80000000) == 0))
6398	{
6399	  fputs ("31", file);
6400	  return;
6401	}
6402      else if ((val & 1) == 0)
6403	{
6404	  for (i = 0; i < 32; i++)
6405	    if ((val >>= 1) & 1)
6406	      break;
6407
6408	  /* If we had ....10, i would be 0.  The result should be
6409	     30, so we need 30 - i.  */
6410	  fprintf (file, "%d", 30 - i);
6411	  return;
6412	}
6413
6414      /* Otherwise, look for the first 0 bit from the left.  The result is its
6415	 number minus 1. We know the high-order bit is one.  */
6416      for (i = 0; i < 32; i++)
6417	if (((val <<= 1) & 0x80000000) == 0)
6418	  break;
6419
6420      fprintf (file, "%d", i);
6421      return;
6422
6423      /* %n outputs the negative of its operand.  */
6424
6425    case 'N':
6426      /* Write the number of elements in the vector times 4.  */
6427      if (GET_CODE (x) != PARALLEL)
6428	output_operand_lossage ("invalid %%N value");
6429      else
6430	fprintf (file, "%d", XVECLEN (x, 0) * 4);
6431      return;
6432
6433    case 'O':
6434      /* Similar, but subtract 1 first.  */
6435      if (GET_CODE (x) != PARALLEL)
6436	output_operand_lossage ("invalid %%O value");
6437      else
6438	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6439      return;
6440
6441    case 'p':
6442      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
6443      if (! INT_P (x)
6444	  || INT_LOWPART (x) < 0
6445	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
6446	output_operand_lossage ("invalid %%p value");
6447      else
6448	fprintf (file, "%d", i);
6449      return;
6450
6451    case 'P':
6452      /* The operand must be an indirect memory reference.  The result
6453	 is the register number.  */
6454      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6455	  || REGNO (XEXP (x, 0)) >= 32)
6456	output_operand_lossage ("invalid %%P value");
6457      else
6458	fprintf (file, "%d", REGNO (XEXP (x, 0)));
6459      return;
6460
6461    case 'q':
6462      /* This outputs the logical code corresponding to a boolean
6463	 expression.  The expression may have one or both operands
6464	 negated (if one, only the first one).  For condition register
6465         logical operations, it will also treat the negated
6466         CR codes as NOTs, but not handle NOTs of them.  */
6467      {
6468	const char *const *t = 0;
6469	const char *s;
6470	enum rtx_code code = GET_CODE (x);
6471	static const char * const tbl[3][3] = {
6472	  { "and", "andc", "nor" },
6473	  { "or", "orc", "nand" },
6474	  { "xor", "eqv", "xor" } };
6475
6476	if (code == AND)
6477	  t = tbl[0];
6478	else if (code == IOR)
6479	  t = tbl[1];
6480	else if (code == XOR)
6481	  t = tbl[2];
6482	else
6483	  output_operand_lossage ("invalid %%q value");
6484
6485	if (GET_CODE (XEXP (x, 0)) != NOT)
6486	  s = t[0];
6487	else
6488	  {
6489	    if (GET_CODE (XEXP (x, 1)) == NOT)
6490	      s = t[2];
6491	    else
6492	      s = t[1];
6493	  }
6494
6495	fputs (s, file);
6496      }
6497      return;
6498
6499    case 'R':
6500      /* X is a CR register.  Print the mask for `mtcrf'.  */
6501      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6502	output_operand_lossage ("invalid %%R value");
6503      else
6504	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6505      return;
6506
6507    case 's':
6508      /* Low 5 bits of 32 - value */
6509      if (! INT_P (x))
6510	output_operand_lossage ("invalid %%s value");
6511      else
6512	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6513      return;
6514
6515    case 'S':
6516      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
6517	 CONST_INT 32-bit mask is considered sign-extended so any
6518	 transition must occur within the CONST_INT, not on the boundary.  */
6519      if (! mask64_operand (x, DImode))
6520	output_operand_lossage ("invalid %%S value");
6521
6522      val = INT_LOWPART (x);
6523
6524      if (val & 1)      /* Clear Left */
6525	{
6526	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6527	    if (!((val >>= 1) & 1))
6528	      break;
6529
6530#if HOST_BITS_PER_WIDE_INT == 32
6531	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6532	    {
6533	      val = CONST_DOUBLE_HIGH (x);
6534
6535	      if (val == 0)
6536		--i;
6537	      else
6538		for (i = 32; i < 64; i++)
6539		  if (!((val >>= 1) & 1))
6540		    break;
6541	    }
6542#endif
6543	/* i = index of last set bit from right
6544	   mask begins at 63 - i from left */
6545	  if (i > 63)
6546	    output_operand_lossage ("%%S computed all 1's mask");
6547
6548	  fprintf (file, "%d", 63 - i);
6549	  return;
6550	}
6551      else	/* Clear Right */
6552	{
6553	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6554	    if ((val >>= 1) & 1)
6555	      break;
6556
6557#if HOST_BITS_PER_WIDE_INT == 32
6558	if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6559	  {
6560	    val = CONST_DOUBLE_HIGH (x);
6561
6562	    if (val == (HOST_WIDE_INT) -1)
6563	      --i;
6564	    else
6565	      for (i = 32; i < 64; i++)
6566		if ((val >>= 1) & 1)
6567		  break;
6568	  }
6569#endif
6570	/* i = index of last clear bit from right
6571	   mask ends at 62 - i from left */
6572	  if (i > 62)
6573	    output_operand_lossage ("%%S computed all 0's mask");
6574
6575	  fprintf (file, "%d", 62 - i);
6576	  return;
6577	}
6578
6579    case 'T':
6580      /* Print the symbolic name of a branch target register.  */
6581      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6582				  && REGNO (x) != COUNT_REGISTER_REGNUM))
6583	output_operand_lossage ("invalid %%T value");
6584      else if (REGNO (x) == LINK_REGISTER_REGNUM)
6585	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6586      else
6587	fputs ("ctr", file);
6588      return;
6589
6590    case 'u':
6591      /* High-order 16 bits of constant for use in unsigned operand.  */
6592      if (! INT_P (x))
6593	output_operand_lossage ("invalid %%u value");
6594      else
6595	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6596		 (INT_LOWPART (x) >> 16) & 0xffff);
6597      return;
6598
6599    case 'v':
6600      /* High-order 16 bits of constant for use in signed operand.  */
6601      if (! INT_P (x))
6602	output_operand_lossage ("invalid %%v value");
6603      else
6604	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6605		 (INT_LOWPART (x) >> 16) & 0xffff);
6606      return;
6607
6608    case 'U':
6609      /* Print `u' if this has an auto-increment or auto-decrement.  */
6610      if (GET_CODE (x) == MEM
6611	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
6612	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6613	putc ('u', file);
6614      return;
6615
6616    case 'V':
6617      /* Print the trap code for this operand.  */
6618      switch (GET_CODE (x))
6619	{
6620	case EQ:
6621	  fputs ("eq", file);   /* 4 */
6622	  break;
6623	case NE:
6624	  fputs ("ne", file);   /* 24 */
6625	  break;
6626	case LT:
6627	  fputs ("lt", file);   /* 16 */
6628	  break;
6629	case LE:
6630	  fputs ("le", file);   /* 20 */
6631	  break;
6632	case GT:
6633	  fputs ("gt", file);   /* 8 */
6634	  break;
6635	case GE:
6636	  fputs ("ge", file);   /* 12 */
6637	  break;
6638	case LTU:
6639	  fputs ("llt", file);  /* 2 */
6640	  break;
6641	case LEU:
6642	  fputs ("lle", file);  /* 6 */
6643	  break;
6644	case GTU:
6645	  fputs ("lgt", file);  /* 1 */
6646	  break;
6647	case GEU:
6648	  fputs ("lge", file);  /* 5 */
6649	  break;
6650	default:
6651	  abort ();
6652	}
6653      break;
6654
6655    case 'w':
6656      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
6657	 normally.  */
6658      if (INT_P (x))
6659	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6660		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6661      else
6662	print_operand (file, x, 0);
6663      return;
6664
6665    case 'W':
6666      /* MB value for a PowerPC64 rldic operand.  */
6667      val = (GET_CODE (x) == CONST_INT
6668	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6669
6670      if (val < 0)
6671	i = -1;
6672      else
6673	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6674	  if ((val <<= 1) < 0)
6675	    break;
6676
6677#if HOST_BITS_PER_WIDE_INT == 32
6678      if (GET_CODE (x) == CONST_INT && i >= 0)
6679	i += 32;  /* zero-extend high-part was all 0's */
6680      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6681	{
6682	  val = CONST_DOUBLE_LOW (x);
6683
6684	  if (val == 0)
6685	    abort ();
6686	  else if (val < 0)
6687	    --i;
6688	  else
6689	    for ( ; i < 64; i++)
6690	      if ((val <<= 1) < 0)
6691		break;
6692	}
6693#endif
6694
6695      fprintf (file, "%d", i + 1);
6696      return;
6697
6698    case 'X':
6699      if (GET_CODE (x) == MEM
6700	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6701	putc ('x', file);
6702      return;
6703
6704    case 'Y':
6705      /* Like 'L', for third word of TImode  */
6706      if (GET_CODE (x) == REG)
6707	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6708      else if (GET_CODE (x) == MEM)
6709	{
6710	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6711	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6712	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6713	  else
6714	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6715	  if (small_data_operand (x, GET_MODE (x)))
6716	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6717		     reg_names[SMALL_DATA_REG]);
6718	}
6719      return;
6720
6721    case 'z':
6722      /* X is a SYMBOL_REF.  Write out the name preceded by a
6723	 period and without any trailing data in brackets.  Used for function
6724	 names.  If we are configured for System V (or the embedded ABI) on
6725	 the PowerPC, do not emit the period, since those systems do not use
6726	 TOCs and the like.  */
6727      if (GET_CODE (x) != SYMBOL_REF)
6728	abort ();
6729
6730      if (XSTR (x, 0)[0] != '.')
6731	{
6732	  switch (DEFAULT_ABI)
6733	    {
6734	    default:
6735	      abort ();
6736
6737	    case ABI_AIX:
6738	      putc ('.', file);
6739	      break;
6740
6741	    case ABI_V4:
6742	    case ABI_AIX_NODESC:
6743	    case ABI_DARWIN:
6744	      break;
6745	    }
6746	}
6747#if TARGET_AIX
6748      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6749#else
6750      assemble_name (file, XSTR (x, 0));
6751#endif
6752      return;
6753
6754    case 'Z':
6755      /* Like 'L', for last word of TImode.  */
6756      if (GET_CODE (x) == REG)
6757	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6758      else if (GET_CODE (x) == MEM)
6759	{
6760	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6761	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6762	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6763	  else
6764	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6765	  if (small_data_operand (x, GET_MODE (x)))
6766	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6767		     reg_names[SMALL_DATA_REG]);
6768	}
6769      return;
6770
6771      /* Print AltiVec memory operand.  */
6772    case 'y':
6773      {
6774	rtx tmp;
6775
6776	if (GET_CODE (x) != MEM)
6777	  abort ();
6778
6779	tmp = XEXP (x, 0);
6780
6781	if (GET_CODE (tmp) == REG)
6782	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6783	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6784	  {
6785	    if (REGNO (XEXP (tmp, 0)) == 0)
6786	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6787		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
6788	    else
6789	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6790		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
6791	  }
6792	else
6793	  abort ();
6794	break;
6795      }
6796
6797    case 0:
6798      if (GET_CODE (x) == REG)
6799	fprintf (file, "%s", reg_names[REGNO (x)]);
6800      else if (GET_CODE (x) == MEM)
6801	{
6802	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
6803	     know the width from the mode.  */
6804	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6805	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6806		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6807	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6808	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6809		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6810	  else
6811	    output_address (XEXP (x, 0));
6812	}
6813      else
6814	output_addr_const (file, x);
6815      return;
6816
6817    default:
6818      output_operand_lossage ("invalid %%xn code");
6819    }
6820}
6821
6822/* Print the address of an operand.  */
6823
void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  /* Plain register: a zero-displacement indirect address.  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  /* Bare constant address: symbol, label, or CONST expression.  */
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* With a TOC, bare constant addresses should have been
	   rewritten as TOC references before reaching here.  */
	abort ();
    }
  /* reg + reg indexed address.  If the first register is r0, print
     the other register first (r0 reads as literal zero in the base
     position of an indexed address).  */
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  /* reg + constant displacement, printed as "disp(reg)".  */
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  /* (lo_sum reg const): low part of the constant relative to REG,
     using the ELF "@l" relocation syntax.  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  /* Same as the ELF case, but with Mach-O "lo16(...)" syntax.  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF, temporarily append "@toc" to the symbol name
		 while printing; the original name is restored below.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary surgery on the RTL.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6911
6912/* Target hook for assembling integer objects.  The powerpc version has
6913   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6914   is defined.  It also needs to handle DI-mode objects on 64-bit
6915   targets.  */
6916
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard so the fixup entry we emit below cannot recurse back
	 into this special case.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit "LCPn: .long (x)@fixup", then record LCPn's address
	     in the .fixup section so the value can be relocated at
	     load time.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else is handled by the generic hook.  */
  return default_assemble_integer (x, size, aligned_p);
}
6975
6976enum rtx_code
6977rs6000_reverse_condition (mode, code)
6978     enum machine_mode mode;
6979     enum rtx_code code;
6980{
6981  /* Reversal of FP compares takes care -- an ordered compare
6982     becomes an unordered compare and vice versa.  */
6983  if (mode == CCFPmode && !flag_unsafe_math_optimizations)
6984    return reverse_condition_maybe_unordered (code);
6985  else
6986    return reverse_condition (code);
6987}
6988
6989/* Generate a compare for CODE.  Return a brand-new rtx that
6990   represents the result of the compare.  */
6991
6992static rtx
6993rs6000_generate_compare (code)
6994     enum rtx_code code;
6995{
6996  enum machine_mode comp_mode;
6997  rtx compare_result;
6998
6999  if (rs6000_compare_fp_p)
7000    comp_mode = CCFPmode;
7001  else if (code == GTU || code == LTU
7002	  || code == GEU || code == LEU)
7003    comp_mode = CCUNSmode;
7004  else
7005    comp_mode = CCmode;
7006
7007  /* First, the compare.  */
7008  compare_result = gen_reg_rtx (comp_mode);
7009  emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7010			  gen_rtx_COMPARE (comp_mode,
7011					   rs6000_compare_op0,
7012					   rs6000_compare_op1)));
7013
7014  /* Some kinds of FP comparisons need an OR operation;
7015     except for flag_unsafe_math_optimizations we don't bother.  */
7016  if (rs6000_compare_fp_p
7017      && ! flag_unsafe_math_optimizations
7018      && (code == LE || code == GE
7019	  || code == UNEQ || code == LTGT
7020	  || code == UNGT || code == UNLT))
7021    {
7022      enum rtx_code or1, or2;
7023      rtx or1_rtx, or2_rtx, compare2_rtx;
7024      rtx or_result = gen_reg_rtx (CCEQmode);
7025
7026      switch (code)
7027	{
7028	case LE: or1 = LT;  or2 = EQ;  break;
7029	case GE: or1 = GT;  or2 = EQ;  break;
7030	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
7031	case LTGT: or1 = LT;  or2 = GT;  break;
7032	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
7033	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
7034	default:  abort ();
7035	}
7036      validate_condition_mode (or1, comp_mode);
7037      validate_condition_mode (or2, comp_mode);
7038      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7039      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7040      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7041				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7042				      const_true_rtx);
7043      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7044
7045      compare_result = or_result;
7046      code = EQ;
7047    }
7048
7049  validate_condition_mode (code, GET_MODE (compare_result));
7050
7051  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7052}
7053
7054
7055/* Emit the RTL for an sCOND pattern.  */
7056
7057void
7058rs6000_emit_sCOND (code, result)
7059     enum rtx_code code;
7060     rtx result;
7061{
7062  rtx condition_rtx;
7063  enum machine_mode op_mode;
7064
7065  condition_rtx = rs6000_generate_compare (code);
7066
7067  op_mode = GET_MODE (rs6000_compare_op0);
7068  if (op_mode == VOIDmode)
7069    op_mode = GET_MODE (rs6000_compare_op1);
7070
7071  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7072    {
7073      PUT_MODE (condition_rtx, DImode);
7074      convert_move (result, condition_rtx, 0);
7075    }
7076  else
7077    {
7078      PUT_MODE (condition_rtx, SImode);
7079      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7080    }
7081}
7082
7083/* Emit a branch of kind CODE to location LOC.  */
7084
7085void
7086rs6000_emit_cbranch (code, loc)
7087     enum rtx_code code;
7088     rtx loc;
7089{
7090  rtx condition_rtx, loc_ref;
7091
7092  condition_rtx = rs6000_generate_compare (code);
7093  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7094  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7095			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7096						     loc_ref, pc_rtx)));
7097}
7098
7099/* Return the string to output a conditional branch to LABEL, which is
7100   the operand number of the label, or -1 if the branch is really a
7101   conditional return.
7102
7103   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
7104   condition code register and its mode specifies what kind of
7105   comparison we made.
7106
7107   REVERSED is non-zero if we should reverse the sense of the comparison.
7108
7109   INSN is the insn.  */
7110
char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE: the returned pointer refers to this static buffer, so the
     result is only valid until the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Insn length 8 means the target is out of conditional-branch range:
     we must invert the condition and branch around an unconditional b.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* For branches that are very close to 50%, assume not-taken.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* Emit the mnemonic; the {old|new} braces select between assembler
     mnemonic dialects.  A NULL label means a conditional return.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
7210
7211/* Emit a conditional move: move TRUE_COND to DEST if OP of the
7212   operands of the last comparison is nonzero/true, FALSE_COND if it
7213   is zero/false.  Return 0 if the hardware has no such operation.  */
7214
int
rs6000_emit_cmove (dest, op, true_cond, false_cond)
     rtx dest;
     rtx op;
     rtx true_cond;
     rtx false_cond;
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match. */
  if (GET_MODE (op1) != compare_mode)
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow...  */
  /* If the comparison is an integer one, since we only have fsel
     it'll be cheaper to use a branch.  */
  if (! rs6000_compare_fp_p)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ)
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (! flag_unsafe_math_optimizations
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (flag_unsafe_math_optimizations)
    switch (code)
      {
      case GT:
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* x <= 0 iff -x >= 0, so negate the operand.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* abs (x) >= 0 fails only when x is a NaN, giving the ORDERED
	 test.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* -abs (x) >= 0 iff x == 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* Select via an inner fsel on GE; both arms of the outer fsel
	 then use its result, and the outer test is on the negated
	 operand.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = temp;
      true_cond = false_cond;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* Same scheme as UNGE: inner fsel on GE, outer test on the
	 negated operand.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = temp;
      false_cond = true_cond;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* Emit the final fsel: dest = (op0 >= 0) ? true_cond : false_cond.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
7374
7375void
7376rs6000_emit_minmax (dest, code, op0, op1)
7377     rtx dest;
7378     enum rtx_code code;
7379     rtx op0;
7380     rtx op1;
7381{
7382  enum machine_mode mode = GET_MODE (op0);
7383  rtx target;
7384  if (code == SMAX || code == UMAX)
7385    target = emit_conditional_move (dest, GE, op0, op1, mode,
7386				    op0, op1, mode, 0);
7387  else
7388    target = emit_conditional_move (dest, GE, op0, op1, mode,
7389				    op1, op0, mode, 0);
7390  if (target == NULL_RTX)
7391    abort ();
7392  if (target != dest)
7393    emit_move_insn (dest, target);
7394}
7395
7396/* This page contains routines that are used to determine what the
7397   function prologue and epilogue code will do and write them out.  */
7398
7399/* Return the first fixed-point register that is required to be
7400   saved. 32 if none.  */
7401
7402int
7403first_reg_to_save ()
7404{
7405  int first_reg;
7406
7407  /* Find lowest numbered live register.  */
7408  for (first_reg = 13; first_reg <= 31; first_reg++)
7409    if (regs_ever_live[first_reg]
7410	&& (! call_used_regs[first_reg]
7411	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7412		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7413		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7414      break;
7415
7416#if TARGET_MACHO
7417  if (flag_pic && current_function_uses_pic_offset_table &&
7418      (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7419    return RS6000_PIC_OFFSET_TABLE_REGNUM;
7420#endif
7421
7422  return first_reg;
7423}
7424
7425/* Similar, for FP regs.  */
7426
7427int
7428first_fp_reg_to_save ()
7429{
7430  int first_reg;
7431
7432  /* Find lowest numbered live register.  */
7433  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7434    if (regs_ever_live[first_reg])
7435      break;
7436
7437  return first_reg;
7438}
7439
7440/* Similar, for AltiVec regs.  */
7441
7442static int
7443first_altivec_reg_to_save ()
7444{
7445  int i;
7446
7447  /* Stack frame remains as is unless we are in AltiVec ABI.  */
7448  if (! TARGET_ALTIVEC_ABI)
7449    return LAST_ALTIVEC_REGNO + 1;
7450
7451  /* Find lowest numbered live register.  */
7452  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7453    if (regs_ever_live[i])
7454      break;
7455
7456  return i;
7457}
7458
7459/* Return a 32-bit mask of the AltiVec registers we need to set in
7460   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
7461   the 32-bit word is 0.  */
7462
7463static unsigned int
7464compute_vrsave_mask ()
7465{
7466  unsigned int i, mask = 0;
7467
7468  /* First, find out if we use _any_ altivec registers.  */
7469  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7470    if (regs_ever_live[i])
7471      mask |= ALTIVEC_REG_BIT (i);
7472
7473  if (mask == 0)
7474    return mask;
7475
7476  /* Next, add all registers that are call-clobbered.  We do this
7477     because post-reload register optimizers such as regrename_optimize
7478     may choose to use them.  They never change the register class
7479     chosen by reload, so cannot create new uses of altivec registers
7480     if there were none before, so the early exit above is safe.  */
7481  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7482     altivec registers not saved in the mask, which might well make the
7483     adjustments below more effective in eliding the save/restore of
7484     VRSAVE in small functions.  */
7485  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7486    if (call_used_regs[i])
7487      mask |= ALTIVEC_REG_BIT (i);
7488
7489  /* Next, remove the argument registers from the set.  These must
7490     be in the VRSAVE mask set by the caller, so we don't need to add
7491     them in again.  More importantly, the mask we compute here is
7492     used to generate CLOBBERs in the set_vrsave insn, and we do not
7493     wish the argument registers to die.  */
7494  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7495    mask &= ~ALTIVEC_REG_BIT (i);
7496
7497  /* Similarly, remove the return value from the set.  */
7498  {
7499    bool yes = false;
7500    diddle_return_value (is_altivec_return_reg, &yes);
7501    if (yes)
7502      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7503  }
7504
7505  return mask;
7506}
7507
7508static void
7509is_altivec_return_reg (reg, xyes)
7510     rtx reg;
7511     void *xyes;
7512{
7513  bool *yes = (bool *) xyes;
7514  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7515    *yes = true;
7516}
7517
7518
7519/* Calculate the stack information for the current function.  This is
7520   complicated by having two separate calling sequences, the AIX calling
7521   sequence and the V.4 calling sequence.
7522
7523   AIX (and Darwin/Mac OS X) stack frames look like:
7524							  32-bit  64-bit
7525	SP---->	+---------------------------------------+
7526		| back chain to caller			| 0	  0
7527		+---------------------------------------+
7528		| saved CR				| 4       8 (8-11)
7529		+---------------------------------------+
7530		| saved LR				| 8       16
7531		+---------------------------------------+
7532		| reserved for compilers		| 12      24
7533		+---------------------------------------+
7534		| reserved for binders			| 16      32
7535		+---------------------------------------+
7536		| saved TOC pointer			| 20      40
7537		+---------------------------------------+
7538		| Parameter save area (P)		| 24      48
7539		+---------------------------------------+
7540		| Alloca space (A)			| 24+P    etc.
7541		+---------------------------------------+
7542		| Local variable space (L)		| 24+P+A
7543		+---------------------------------------+
7544		| Float/int conversion temporary (X)	| 24+P+A+L
7545		+---------------------------------------+
7546		| Save area for AltiVec registers (W)	| 24+P+A+L+X
7547		+---------------------------------------+
7548		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
7549		+---------------------------------------+
7550		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
7551		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
7555		+---------------------------------------+
7556	old SP->| back chain to caller's caller		|
7557		+---------------------------------------+
7558
7559   The required alignment for AIX configurations is two words (i.e., 8
7560   or 16 bytes).
7561
7562
7563   V.4 stack frames look like:
7564
7565	SP---->	+---------------------------------------+
7566		| back chain to caller			| 0
7567		+---------------------------------------+
7568		| caller's saved LR			| 4
7569		+---------------------------------------+
7570		| Parameter save area (P)		| 8
7571		+---------------------------------------+
7572		| Alloca space (A)			| 8+P
7573		+---------------------------------------+
7574		| Varargs save area (V)			| 8+P+A
7575		+---------------------------------------+
7576		| Local variable space (L)		| 8+P+A+V
7577		+---------------------------------------+
7578		| Float/int conversion temporary (X)	| 8+P+A+V+L
7579		+---------------------------------------+
7580		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
7581		+---------------------------------------+
7582		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
7583		+---------------------------------------+
7584		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
7585		+---------------------------------------+
7586		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
7587		+---------------------------------------+
7588		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
7589		+---------------------------------------+
7590		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
7591		+---------------------------------------+
7592	old SP->| back chain to caller's caller		|
7593		+---------------------------------------+
7594
7595   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7596   given.  (But note below and in sysv4.h that we require only 8 and
7597   may round up the size of our stack frame anyways.  The historical
7598   reason is early versions of powerpc-linux which didn't properly
7599   align the stack at program startup.  A happy side-effect is that
7600   -mno-eabi libraries can be used with -meabi programs.)
7601
7602   The EABI configuration defaults to the V.4 layout, unless
7603   -mcall-aix is used, in which case the AIX layout is used.  However,
7604   the stack alignment requirements may differ.  If -mno-eabi is not
7605   given, the required stack alignment is 8 bytes; if -mno-eabi is
7606   given, the required alignment is 16 bytes.  (But see V.4 comment
7607   above.)  */
7608
7609#ifndef ABI_STACK_BOUNDARY
7610#define ABI_STACK_BOUNDARY STACK_BOUNDARY
7611#endif
7612
7613rs6000_stack_t *
7614rs6000_stack_info ()
7615{
7616  static rs6000_stack_t info, zero_info;
7617  rs6000_stack_t *info_ptr = &info;
7618  int reg_size = TARGET_POWERPC64 ? 8 : 4;
7619  enum rs6000_abi abi;
7620  int ehrd_size;
7621  int total_raw_size;
7622
7623  /* Zero all fields portably.  */
7624  info = zero_info;
7625
7626  /* Select which calling sequence.  */
7627  info_ptr->abi = abi = DEFAULT_ABI;
7628
7629  /* Calculate which registers need to be saved & save area size.  */
7630  info_ptr->first_gp_reg_save = first_reg_to_save ();
7631  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7632     even if it currently looks like we won't.  */
7633  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7634       || (flag_pic == 1 && abi == ABI_V4)
7635       || (flag_pic && abi == ABI_DARWIN))
7636      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7637    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7638  else
7639    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7640
7641  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7642  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7643
7644  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7645  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7646				 - info_ptr->first_altivec_reg_save);
7647
7648  /* Does this function call anything?  */
7649  info_ptr->calls_p = (! current_function_is_leaf
7650		       || cfun->machine->ra_needs_full_frame);
7651
7652  /* Determine if we need to save the link register.  */
7653  if (rs6000_ra_ever_killed ()
7654      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7655#ifdef TARGET_RELOCATABLE
7656      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7657#endif
7658      || (info_ptr->first_fp_reg_save != 64
7659	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7660      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7661      || (abi == ABI_V4 && current_function_calls_alloca)
7662      || (DEFAULT_ABI == ABI_DARWIN
7663	  && flag_pic
7664	  && current_function_uses_pic_offset_table)
7665      || info_ptr->calls_p)
7666    {
7667      info_ptr->lr_save_p = 1;
7668      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7669    }
7670
7671  /* Determine if we need to save the condition code registers.  */
7672  if (regs_ever_live[CR2_REGNO]
7673      || regs_ever_live[CR3_REGNO]
7674      || regs_ever_live[CR4_REGNO])
7675    {
7676      info_ptr->cr_save_p = 1;
7677      if (abi == ABI_V4)
7678	info_ptr->cr_size = reg_size;
7679    }
7680
7681  /* If the current function calls __builtin_eh_return, then we need
7682     to allocate stack space for registers that will hold data for
7683     the exception handler.  */
7684  if (current_function_calls_eh_return)
7685    {
7686      unsigned int i;
7687      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7688	continue;
7689      ehrd_size = i * UNITS_PER_WORD;
7690    }
7691  else
7692    ehrd_size = 0;
7693
7694  /* Determine various sizes.  */
7695  info_ptr->reg_size     = reg_size;
7696  info_ptr->fixed_size   = RS6000_SAVE_AREA;
7697  info_ptr->varargs_size = RS6000_VARARGS_AREA;
7698  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
7699  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
7700					 8);
7701
7702  if (TARGET_ALTIVEC_ABI)
7703    {
7704      info_ptr->vrsave_mask = compute_vrsave_mask ();
7705      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
7706    }
7707  else
7708    {
7709      info_ptr->vrsave_mask = 0;
7710      info_ptr->vrsave_size = 0;
7711    }
7712
7713  /* Calculate the offsets.  */
7714  switch (abi)
7715    {
7716    case ABI_NONE:
7717    default:
7718      abort ();
7719
7720    case ABI_AIX:
7721    case ABI_AIX_NODESC:
7722    case ABI_DARWIN:
7723      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7724      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7725
7726      if (TARGET_ALTIVEC_ABI)
7727	{
7728	  info_ptr->vrsave_save_offset
7729	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7730
7731	  /* Align stack so vector save area is on a quadword boundary.  */
7732	  if (info_ptr->altivec_size != 0)
7733	    info_ptr->altivec_padding_size
7734	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7735	  else
7736	    info_ptr->altivec_padding_size = 0;
7737
7738	  info_ptr->altivec_save_offset
7739	    = info_ptr->vrsave_save_offset
7740	    - info_ptr->altivec_padding_size
7741	    - info_ptr->altivec_size;
7742
7743	  /* Adjust for AltiVec case.  */
7744	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7745	}
7746      else
7747	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
7748      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
7749      info_ptr->lr_save_offset   = 2*reg_size;
7750      break;
7751
7752    case ABI_V4:
7753      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7754      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7755      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
7756
7757      if (TARGET_ALTIVEC_ABI)
7758	{
7759	  info_ptr->vrsave_save_offset
7760	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7761
7762	  /* Align stack so vector save area is on a quadword boundary.  */
7763	  if (info_ptr->altivec_size != 0)
7764	    info_ptr->altivec_padding_size
7765	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7766	  else
7767	    info_ptr->altivec_padding_size = 0;
7768
7769	  info_ptr->altivec_save_offset
7770	    = info_ptr->vrsave_save_offset
7771	    - info_ptr->altivec_padding_size
7772	    - info_ptr->altivec_size;
7773
7774	  /* Adjust for AltiVec case.  */
7775	  info_ptr->toc_save_offset
7776	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
7777	}
7778      else
7779	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
7780      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
7781      info_ptr->lr_save_offset   = reg_size;
7782      break;
7783    }
7784
7785  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
7786					 + info_ptr->gp_size
7787					 + info_ptr->altivec_size
7788					 + info_ptr->altivec_padding_size
7789					 + info_ptr->vrsave_size
7790					 + ehrd_size
7791					 + info_ptr->cr_size
7792					 + info_ptr->lr_size
7793					 + info_ptr->vrsave_size
7794					 + info_ptr->toc_size,
7795					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7796					 ? 16 : 8);
7797
7798  total_raw_size	 = (info_ptr->vars_size
7799			    + info_ptr->parm_size
7800			    + info_ptr->save_size
7801			    + info_ptr->varargs_size
7802			    + info_ptr->fixed_size);
7803
7804  info_ptr->total_size =
7805    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7806
7807  /* Determine if we need to allocate any stack frame:
7808
7809     For AIX we need to push the stack if a frame pointer is needed
7810     (because the stack might be dynamically adjusted), if we are
7811     debugging, if we make calls, or if the sum of fp_save, gp_save,
7812     and local variables are more than the space needed to save all
7813     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7814     + 18*8 = 288 (GPR13 reserved).
7815
7816     For V.4 we don't have the stack cushion that AIX uses, but assume
7817     that the debugger can handle stackless frames.  */
7818
7819  if (info_ptr->calls_p)
7820    info_ptr->push_p = 1;
7821
7822  else if (abi == ABI_V4)
7823    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7824
7825  else
7826    info_ptr->push_p = (frame_pointer_needed
7827			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7828			|| ((total_raw_size - info_ptr->fixed_size)
7829			    > (TARGET_32BIT ? 220 : 288)));
7830
7831  /* Zero offsets if we're not saving those registers.  */
7832  if (info_ptr->fp_size == 0)
7833    info_ptr->fp_save_offset = 0;
7834
7835  if (info_ptr->gp_size == 0)
7836    info_ptr->gp_save_offset = 0;
7837
7838  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7839    info_ptr->altivec_save_offset = 0;
7840
7841  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7842    info_ptr->vrsave_save_offset = 0;
7843
7844  if (! info_ptr->lr_save_p)
7845    info_ptr->lr_save_offset = 0;
7846
7847  if (! info_ptr->cr_save_p)
7848    info_ptr->cr_save_offset = 0;
7849
7850  if (! info_ptr->toc_save_p)
7851    info_ptr->toc_save_offset = 0;
7852
7853  return info_ptr;
7854}
7855
/* Dump the frame layout computed by rs6000_stack_info to stderr, for
   debugging.  INFO may be null, in which case the layout for the
   current function is computed and printed.  Fields that are zero (or
   at their "nothing saved" defaults) are omitted from the dump.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  /* 32 / 64 / > LAST_ALTIVEC_REGNO mean "no register saved" for the
     three register files, so those defaults are suppressed.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Offsets are relative to the incoming stack pointer; negative for
     the register save areas below it.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
7987
7988rtx
7989rs6000_return_addr (count, frame)
7990     int count;
7991     rtx frame;
7992{
7993  /* Currently we don't optimize very well between prolog and body
7994     code and for PIC code the code can be actually quite bad, so
7995     don't try to be too clever here.  */
7996  if (count != 0
7997      || flag_pic != 0
7998      || DEFAULT_ABI == ABI_AIX
7999      || DEFAULT_ABI == ABI_AIX_NODESC)
8000    {
8001      cfun->machine->ra_needs_full_frame = 1;
8002
8003      return
8004	gen_rtx_MEM
8005	  (Pmode,
8006	   memory_address
8007	   (Pmode,
8008	    plus_constant (copy_to_reg
8009			   (gen_rtx_MEM (Pmode,
8010					 memory_address (Pmode, frame))),
8011			   RETURN_ADDRESS_OFFSET)));
8012    }
8013
8014  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8015}
8016
8017static int
8018rs6000_ra_ever_killed ()
8019{
8020  rtx top;
8021
8022#ifdef ASM_OUTPUT_MI_THUNK
8023  if (current_function_is_thunk)
8024    return 0;
8025#endif
8026  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8027      || cfun->machine->ra_needs_full_frame)
8028    return regs_ever_live[LINK_REGISTER_REGNUM];
8029
8030  push_topmost_sequence ();
8031  top = get_insns ();
8032  pop_topmost_sequence ();
8033
8034  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8035			    top, NULL_RTX);
8036}
8037
8038/* Add a REG_MAYBE_DEAD note to the insn.  */
8039static void
8040rs6000_maybe_dead (insn)
8041     rtx insn;
8042{
8043  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8044					const0_rtx,
8045					REG_NOTES (insn));
8046}
8047
8048/* Emit instructions needed to load the TOC register.
8049   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8050   a constant pool; or for SVR4 -fpic.  */
8051
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  /* The TOC / PIC offset table lives in a fixed GPR; DEST is that
     hard register.  FROMPROLOG nonzero means we are emitting the
     actual prologue (use hard LR/r0 as temporaries and the per-function
     LCF/LCL labels); zero means a reload of the TOC elsewhere (fresh
     pseudos and uniquely numbered LCG labels).  All insns get
     REG_MAYBE_DEAD notes so flow can delete them if unused.  */
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC: a single load_toc_v4_pic_si gets the GOT
     address via the link register.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* SVR4/EABI -fPIC: compute the TOC address as
     LR-at-label + (TOC - label), using a two-insn PIC sequence.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF is the label at the PIC base; LCL labels the constant
	     holding the label-to-TOC displacement.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  /* Counter for unique LCG labels; static so each non-prologue
	     reload in the translation unit gets its own label.  */
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      /* DEST = PIC base + displacement.  */
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Materialize the absolute TOC address with lis/addi.  */
      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload r2 from the TOC anchor in the constant pool.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
8142
/* Return the alias set used for TOC references, creating it lazily on
   the first call.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();
  return toc_alias_set;
}
8151
/* This returns nonzero if the current function uses the TOC.  This is
8153   determined by the presence of (unspec ... 7), which is generated by
8154   the various load_toc_* patterns.  */
8155
8156int
8157uses_TOC ()
8158{
8159    rtx insn;
8160
8161    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8162      if (INSN_P (insn))
8163	{
8164	  rtx pat = PATTERN (insn);
8165	  int i;
8166
8167	  if (GET_CODE (pat) == PARALLEL)
8168	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8169	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8170		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8171		  return 1;
8172	}
8173    return 0;
8174}
8175
8176rtx
8177create_TOC_reference (symbol)
8178    rtx symbol;
8179{
8180  return gen_rtx_PLUS (Pmode,
8181	   gen_rtx_REG (Pmode, TOC_REGISTER),
8182	     gen_rtx_CONST (Pmode,
8183	       gen_rtx_MINUS (Pmode, symbol,
8184		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8185}
8186
8187#if TARGET_AIX
8188/* __throw will restore its own return address to be the same as the
8189   return address of the function that the throw is being made to.
8190   This is unfortunate, because we want to check the original
8191   return address to see if we need to restore the TOC.
8192   So we have to squirrel it away here.
8193   This is used only in compiling __throw and __rethrow.
8194
8195   Most of this code should be removed by CSE.  */
/* Pseudo holding the opcode found at the return address of this
   function's caller; filled in by the unwind-init code below and
   consumed by rs6000_emit_eh_toc_restore.  */
static rtx insn_after_throw;

/* This does the saving...  */
/* Capture the instruction at the caller's return address: load the
   back chain from the frame pointer, read the saved-LR slot two words
   up from it, and fetch the 32-bit opcode at that address into the
   INSN_AFTER_THROW pseudo.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* stack_top = caller's frame (back chain at *fp).  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* opcode_addr = saved LR slot, two pointers above the back chain.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
8217
8218/* Emit insns to _restore_ the TOC register, at runtime (specifically
8219   in _eh.o).  Only used on AIX.
8220
8221   The idea is that on AIX, function calls look like this:
8222	bl  somefunction-trampoline
8223	lwz r2,20(sp)
8224
8225   and later,
8226	somefunction-trampoline:
8227	stw r2,20(sp)
8228	 ... load function address in the count register ...
8229	bctr
8230   or like this, if the linker determines that this is not a cross-module call
8231   and so the TOC need not be restored:
8232	bl  somefunction
8233	nop
8234   or like this, if the compiler could determine that this is not a
8235   cross-module call:
8236	bl  somefunction
8237   now, the tricky bit here is that register 2 is saved and restored
8238   by the _linker_, so we can't readily generate debugging information
8239   for it.  So we need to go back up the call chain looking at the
8240   insns at return addresses to see which calls saved the TOC register
8241   and so see where it gets restored from.
8242
8243   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8244   just before the actual epilogue.
8245
8246   On the bright side, this incurs no space or time overhead unless an
8247   exception is thrown, except for the extra code in libgcc.a.
8248
8249   The parameter STACKSIZE is a register containing (at runtime)
8250   the amount to be popped off the stack in addition to the stack frame
8251   of this routine (which will be __throw or __rethrow, and so is
8252   guaranteed to have a stack frame).  */
8253
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack = this frame's back chain; top_of_stack = the
     frame at which the walk must stop (bottom + STACKSIZE).  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The opcode of the TOC-restore insn that follows a cross-module
     call: "lwz r2,20(sp)" (32-bit) or "ld r2,40(sp)" (64-bit).  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* Start from the opcode squirreled away by
     rs6000_aix_emit_builtin_unwind_init.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at this frame's return address is the TOC-restore
     opcode, that frame saved r2 at the standard slot — reload it.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* The saved TOC sits five pointers above the back chain
     (20(sp) / 40(sp)).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk reaches the target frame.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step up one frame: follow the back chain, then fetch the opcode
     at that frame's saved return address (two pointers up).  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8317#endif /* TARGET_AIX */
8318
8319/* This ties together stack memory (MEM with an alias set of
8320   rs6000_sr_alias_set) and the change to the stack pointer.  */
8321
8322static void
8323rs6000_emit_stack_tie ()
8324{
8325  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8326
8327  set_mem_alias_set (mem, rs6000_sr_alias_set);
8328  emit_insn (gen_stack_tie (mem));
8329}
8330
8331/* Emit the correct code for allocating stack space, as insns.
8332   If COPY_R12, make sure a copy of the old frame is left in r12.
8333   The generated code may use hard register 0 as a temporary.  */
8334
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit, trap before the decrement if the new stack
     pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit is in a register: tmp = limit + size, trap if
	     sp < tmp (i.e. sp - size < limit).  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol+size with lis/addi
	     and trap the same way.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* The non-update path below needs the old SP in r12 to store as the
     back chain, so copy it there in that case too.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* stwu/stdu: decrement SP and store the back chain in one insn.
	 A decrement beyond the 16-bit displacement range must go
	 through a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement SP, then store the old SP (saved
	 in r12 above) as the back chain at the new top of stack.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the SP adjustment for dwarf2 unwind info, independent of
     which insn sequence performed it.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8420
8421/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8422   knows that:
8423
8424     (mem (plus (blah) (regXX)))
8425
8426   is really:
8427
8428     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
8429
8430static void
8431altivec_frame_fixup (insn, reg, val)
8432     rtx insn, reg;
8433     HOST_WIDE_INT val;
8434{
8435  rtx real;
8436
8437  real = copy_rtx (PATTERN (insn));
8438
8439  real = replace_rtx (real, reg, GEN_INT (val));
8440
8441  RTX_FRAME_RELATED_P (insn) = 1;
8442  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8443					real,
8444					REG_NOTES (insn));
8445}
8446
8447/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8448   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8449   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
8450   deduce these equivalences by itself so it wasn't necessary to hold
8451   its hand so much.  */
8452
8453static void
8454rs6000_frame_related (insn, reg, val, reg2, rreg)
8455     rtx insn;
8456     rtx reg;
8457     HOST_WIDE_INT val;
8458     rtx reg2;
8459     rtx rreg;
8460{
8461  rtx real, temp;
8462
8463  real = copy_rtx (PATTERN (insn));
8464
8465  real = replace_rtx (real, reg,
8466		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8467							STACK_POINTER_REGNUM),
8468				    GEN_INT (val)));
8469
8470  /* We expect that 'real' is either a SET or a PARALLEL containing
8471     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
8472     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
8473
8474  if (GET_CODE (real) == SET)
8475    {
8476      rtx set = real;
8477
8478      temp = simplify_rtx (SET_SRC (set));
8479      if (temp)
8480	SET_SRC (set) = temp;
8481      temp = simplify_rtx (SET_DEST (set));
8482      if (temp)
8483	SET_DEST (set) = temp;
8484      if (GET_CODE (SET_DEST (set)) == MEM)
8485	{
8486	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8487	  if (temp)
8488	    XEXP (SET_DEST (set), 0) = temp;
8489	}
8490    }
8491  else if (GET_CODE (real) == PARALLEL)
8492    {
8493      int i;
8494      for (i = 0; i < XVECLEN (real, 0); i++)
8495	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8496	  {
8497	    rtx set = XVECEXP (real, 0, i);
8498
8499	    temp = simplify_rtx (SET_SRC (set));
8500	    if (temp)
8501	      SET_SRC (set) = temp;
8502	    temp = simplify_rtx (SET_DEST (set));
8503	    if (temp)
8504	      SET_DEST (set) = temp;
8505	    if (GET_CODE (SET_DEST (set)) == MEM)
8506	      {
8507		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8508		if (temp)
8509		  XEXP (SET_DEST (set), 0) = temp;
8510	      }
8511	    RTX_FRAME_RELATED_P (set) = 1;
8512	  }
8513    }
8514  else
8515    abort ();
8516
8517  if (reg2 != NULL_RTX)
8518    real = replace_rtx (real, reg2, rreg);
8519
8520  RTX_FRAME_RELATED_P (insn) = 1;
8521  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8522					real,
8523					REG_NOTES (insn));
8524}
8525
8526/* Returns an insn that has a vrsave set operation with the
8527   appropriate CLOBBERs.  */
8528
8529static rtx
8530generate_set_vrsave (reg, info, epiloguep)
8531     rtx reg;
8532     rs6000_stack_t *info;
8533     int epiloguep;
8534{
8535  int nclobs, i;
8536  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8537  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8538
8539  clobs[0]
8540    = gen_rtx_SET (VOIDmode,
8541		   vrsave,
8542		   gen_rtx_UNSPEC_VOLATILE (SImode,
8543					    gen_rtvec (2, reg, vrsave),
8544					    30));
8545
8546  nclobs = 1;
8547
8548  /* We need to clobber the registers in the mask so the scheduler
8549     does not move sets to VRSAVE before sets of AltiVec registers.
8550
8551     However, if the function receives nonlocal gotos, reload will set
8552     all call saved registers live.  We will end up with:
8553
8554     	(set (reg 999) (mem))
8555	(parallel [ (set (reg vrsave) (unspec blah))
8556		    (clobber (reg 999))])
8557
8558     The clobber will cause the store into reg 999 to be dead, and
8559     flow will attempt to delete an epilogue insn.  In this case, we
8560     need an unspec use/set of the register.  */
8561
8562  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8563    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8564      {
8565	if (!epiloguep || call_used_regs [i])
8566	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8567					     gen_rtx_REG (V4SImode, i));
8568	else
8569	  {
8570	    rtx reg = gen_rtx_REG (V4SImode, i);
8571
8572	    clobs[nclobs++]
8573	      = gen_rtx_SET (VOIDmode,
8574			     reg,
8575			     gen_rtx_UNSPEC (V4SImode,
8576					     gen_rtvec (1, reg), 27));
8577	  }
8578      }
8579
8580  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8581
8582  for (i = 0; i < nclobs; ++i)
8583    XVECEXP (insn, 0, i) = clobs[i];
8584
8585  return insn;
8586}
8587
/* Emit function prologue as insns: allocate the stack frame, save
   AltiVec/VRSAVE/FP/GP registers, LR and CR as required by the stack
   layout computed in rs6000_stack_info, and set up the frame pointer
   and TOC/PIC registers.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  /* Register used to address the save area: sp, or r12 when the V.4
     frame is too big for 16-bit offsets from the new sp.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* stmw only handles 32-bit regs and is only a win for >1 register.  */
  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the slot offset, since AltiVec loads/stores
	       only take a [reg+reg] address.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->fp_save_offset
					  + sp_offset
					  + 8 * i));
	    mem = gen_rtx_MEM (DFmode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Call the out-of-line FP save routine; the PARALLEL describes
	 the call's clobbers and the stores it performs so the unwind
	 info is correct.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p, dwarfp;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Save each live call-saved GPR individually.  The PIC offset
	 table register is also saved when small-model PIC (V.4) or
	 Darwin PIC needs it.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->gp_save_offset
					  + sp_offset
					  + reg_size * i));
	    mem = gen_rtx_MEM (reg_mode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, reg, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  reg = gen_rtx_REG (reg_mode, regno);
	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	}
    }

  /* Save lr if we used it.  LR was copied to r0 earlier.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R11 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, 11),
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

    rs6000_emit_load_toc_table (TRUE);

    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		      gen_rtx_REG (Pmode, 11));
  }

  /* Darwin PIC: load the picbase into LR and copy it to the PIC
     offset table register; these may be deleted later if unused.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
8932
8933/* Write function prologue.  */
8934
8935static void
8936rs6000_output_function_prologue (file, size)
8937     FILE *file;
8938     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8939{
8940  rs6000_stack_t *info = rs6000_stack_info ();
8941
8942  if (TARGET_DEBUG_STACK)
8943    debug_stack_info (info);
8944
8945  /* Write .extern for any function we will call to save and restore
8946     fp values.  */
8947  if (info->first_fp_reg_save < 64
8948      && !FP_SAVE_INLINE (info->first_fp_reg_save))
8949    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8950	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8951	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8952	     RESTORE_FP_SUFFIX);
8953
8954  /* Write .extern for AIX common mode routines, if needed.  */
8955  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8956    {
8957      fputs ("\t.extern __mulh\n", file);
8958      fputs ("\t.extern __mull\n", file);
8959      fputs ("\t.extern __divss\n", file);
8960      fputs ("\t.extern __divus\n", file);
8961      fputs ("\t.extern __quoss\n", file);
8962      fputs ("\t.extern __quous\n", file);
8963      common_mode_defined = 1;
8964    }
8965
8966  if (! HAVE_prologue)
8967    {
8968      start_sequence ();
8969
8970      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8971	 the "toplevel" insn chain.  */
8972      emit_note (0, NOTE_INSN_DELETED);
8973      rs6000_emit_prologue ();
8974      emit_note (0, NOTE_INSN_DELETED);
8975
8976      /* Expand INSN_ADDRESSES so final() doesn't crash. */
8977      {
8978	rtx insn;
8979	unsigned addr = 0;
8980	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
8981	  {
8982	    INSN_ADDRESSES_NEW (insn, addr);
8983	    addr += 4;
8984	  }
8985      }
8986
8987      if (TARGET_DEBUG_STACK)
8988	debug_rtx_list (get_insns (), 100);
8989      final (get_insns (), file, FALSE, FALSE);
8990      end_sequence ();
8991    }
8992
8993  rs6000_pic_labelno++;
8994}
8995
/* Emit function epilogue as insns: restore the saved registers
   (AltiVec, VRSAVE, LR, CR, GPRs, FPRs), unwind the stack pointer,
   and, unless SIBCALL, emit the return (possibly via the out-of-line
   FP restore routine).

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  /* Register used to address the save area; r11 under V.4 when
     restoring via the back chain, otherwise sp.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors it pays to split a multi-field mtcrf into
     one move per field; also when optimizing for size.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the slot offset; AltiVec loads only take a
	       [reg+reg] address.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first so the
     mtlr below can overlap other restores.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (Pmode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore each live call-saved GPR (and the PIC offset
       table register when PIC required it to be saved) individually.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i),
			  mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL per restore, each CR field set from r12 with
	     its own field mask.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the extra stack adjustment demanded by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return, unless this epilogue precedes a sibling call
     (which supplies its own jump).  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
9344
9345/* Write function epilogue.  */
9346
/* Emit the function epilogue to FILE.  If the target has no epilogue
   RTL pattern (! HAVE_epilogue), generate the epilogue insns here and
   feed them through final ().  Then, for AIX, append the traceback
   table that the system debuggers expect.

   SIZE is unused; all frame layout data comes from
   rs6000_stack_info ().  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* The optional (variable-length) traceback-table fields are dropped
     when optimizing for size or on ELF targets.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final () doesn't crash.  The
	     addresses are dummies (4 bytes per insn); they only need
	     to exist.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyway, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only parameters passed in registers are recorded.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* 10 = single float, 11 = double float (see the
			 "Parameter types" comment further down).  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
  return;
}
9607
9608/* A C compound statement that outputs the assembler code for a thunk
9609   function, used to implement C++ virtual function calls with
9610   multiple inheritance.  The thunk acts as a wrapper around a virtual
9611   function, adjusting the implicit object parameter before handing
9612   control off to the real function.
9613
9614   First, emit code to add the integer DELTA to the location that
9615   contains the incoming first argument.  Assume that this argument
9616   contains a pointer, and is the one used to pass the `this' pointer
9617   in C++.  This is the incoming argument *before* the function
9618   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
9619   values of all other incoming arguments.
9620
9621   After the addition, emit code to jump to FUNCTION, which is a
9622   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
9623   not touch the return address.  Hence returning from FUNCTION will
9624   return to whoever called the current `thunk'.
9625
9626   The effect must be as if FUNCTION had been called directly with the
9627   adjusted first argument.  This macro is responsible for emitting
9628   all of the code for a thunk function; output_function_prologue()
9629   and output_function_epilogue() are not invoked.
9630
9631   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
9632   been extracted from it.)  It might possibly be useful on some
9633   targets, but probably not.
9634
9635   If you do not define this macro, the target-independent code in the
9636   C++ frontend will generate a less efficient heavyweight thunk that
9637   calls FUNCTION instead of jumping to it.  The generic approach does
9638   not support varargs.  */
9639
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* If FUNCTION returns an aggregate, r3 presumably carries the hidden
     return-value pointer and `this' arrives in r4; otherwise `this'
     is in r3.  (TODO confirm against the calling-convention code.)  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;	/* Numbers the per-thunk TOC labels.  */

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  if (TARGET_ELF)
	    function_section (current_function_decl);
	  else
	    text_section ();
	  /* With a minimal TOC, first reload the TOC base itself into
	     r12, then fetch the descriptor address through it.  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* r12 now points at the function descriptor: load the entry
	     address into r0, the callee's TOC into r2 and the static
	     chain into r11, then branch through CTR.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9798
9799
9800/* A quick summary of the various types of 'constant-pool tables'
9801   under PowerPC:
9802
9803   Target	Flags		Name		One table per
9804   AIX		(none)		AIX TOC		object file
9805   AIX		-mfull-toc	AIX TOC		object file
9806   AIX		-mminimal-toc	AIX minimal TOC	translation unit
9807   SVR4/EABI	(none)		SVR4 SDATA	object file
9808   SVR4/EABI	-fpic		SVR4 pic	object file
9809   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
9810   SVR4/EABI	-mrelocatable	EABI TOC	function
9811   SVR4/EABI	-maix		AIX TOC		object file
9812   SVR4/EABI	-maix -mminimal-toc
9813				AIX minimal TOC	translation unit
9814
9815   Name			Reg.	Set by	entries	      contains:
9816					made by	 addrs?	fp?	sum?
9817
9818   AIX TOC		2	crt0	as	 Y	option	option
9819   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
9820   SVR4 SDATA		13	crt0	gcc	 N	Y	N
9821   SVR4 pic		30	prolog	ld	 Y	not yet	N
9822   SVR4 PIC		30	prolog	gcc	 Y	option	option
9823   EABI TOC		30	prolog	gcc	 Y	option	option
9824
9825*/
9826
9827/* Hash table stuff for keeping track of TOC entries.  */
9828
/* One entry in the TOC hash table: a (constant, mode) pair together
   with the number of the .LCn label that was emitted for it.  Used by
   output_toc to commoned duplicate TOC entries.  */
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Machine mode the constant was entered under; entries with equal
     keys but different modes are distinct.  */
  enum machine_mode key_mode;
  /* Number of the internal "LC" label assigned to this entry.  */
  int labelno;
};

/* The table itself, keyed/compared by toc_hash_function and
   toc_hash_eq below.  */
static htab_t toc_hash_table;
9839
9840/* Hash functions for the hash table.  */
9841
9842static unsigned
9843rs6000_hash_constant (k)
9844     rtx k;
9845{
9846  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9847  const char *format = GET_RTX_FORMAT (GET_CODE (k));
9848  int flen = strlen (format);
9849  int fidx;
9850
9851  if (GET_CODE (k) == LABEL_REF)
9852    return result * 1231 + X0INT (XEXP (k, 0), 3);
9853
9854  if (GET_CODE (k) == CONST_DOUBLE)
9855    fidx = 1;
9856  else if (GET_CODE (k) == CODE_LABEL)
9857    fidx = 3;
9858  else
9859    fidx = 0;
9860
9861  for (; fidx < flen; fidx++)
9862    switch (format[fidx])
9863      {
9864      case 's':
9865	{
9866	  unsigned i, len;
9867	  const char *str = XSTR (k, fidx);
9868	  len = strlen (str);
9869	  result = result * 613 + len;
9870	  for (i = 0; i < len; i++)
9871	    result = result * 613 + (unsigned) str[i];
9872	  break;
9873	}
9874      case 'u':
9875      case 'e':
9876	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9877	break;
9878      case 'i':
9879      case 'n':
9880	result = result * 613 + (unsigned) XINT (k, fidx);
9881	break;
9882      case 'w':
9883	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9884	  result = result * 613 + (unsigned) XWINT (k, fidx);
9885	else
9886	  {
9887	    size_t i;
9888	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9889	      result = result * 613 + (unsigned) (XWINT (k, fidx)
9890						  >> CHAR_BIT * i);
9891	  }
9892	break;
9893      default:
9894	abort ();
9895      }
9896  return result;
9897}
9898
9899static unsigned
9900toc_hash_function (hash_entry)
9901     const void * hash_entry;
9902{
9903  const struct toc_hash_struct *thc =
9904    (const struct toc_hash_struct *) hash_entry;
9905  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9906}
9907
9908/* Compare H1 and H2 for equivalence.  */
9909
9910static int
9911toc_hash_eq (h1, h2)
9912     const void * h1;
9913     const void * h2;
9914{
9915  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9916  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9917
9918  if (((const struct toc_hash_struct *) h1)->key_mode
9919      != ((const struct toc_hash_struct *) h2)->key_mode)
9920    return 0;
9921
9922  /* Gotcha:  One of these const_doubles will be in memory.
9923     The other may be on the constant-pool chain.
9924     So rtx_equal_p will think they are different...  */
9925  if (r1 == r2)
9926    return 1;
9927  if (GET_CODE (r1) != GET_CODE (r2)
9928      || GET_MODE (r1) != GET_MODE (r2))
9929    return 0;
9930  if (GET_CODE (r1) == CONST_DOUBLE)
9931    {
9932      int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9933      int i;
9934      for (i = 1; i < format_len; i++)
9935	if (XWINT (r1, i) != XWINT (r2, i))
9936	  return 0;
9937
9938      return 1;
9939    }
9940  else if (GET_CODE (r1) == LABEL_REF)
9941    return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9942	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9943  else
9944    return rtx_equal_p (r1, r2);
9945}
9946
9947/* Mark the hash table-entry HASH_ENTRY.  */
9948
9949static int
9950toc_hash_mark_entry (hash_slot, unused)
9951     void ** hash_slot;
9952     void * unused ATTRIBUTE_UNUSED;
9953{
9954  const struct toc_hash_struct * hash_entry =
9955    *(const struct toc_hash_struct **) hash_slot;
9956  rtx r = hash_entry->key;
9957  ggc_set_mark (hash_entry);
9958  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
9959  if (GET_CODE (r) == LABEL_REF)
9960    {
9961      ggc_set_mark (r);
9962      ggc_set_mark (XEXP (r, 0));
9963    }
9964  else
9965    ggc_mark_rtx (r);
9966  return 1;
9967}
9968
9969/* Mark all the elements of the TOC hash-table *HT.  */
9970
9971static void
9972toc_hash_mark_table (vht)
9973     void *vht;
9974{
9975  htab_t *ht = vht;
9976
9977  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9978}
9979
9980/* These are the names given by the C++ front-end to vtables, and
9981   vtable-like objects.  Ideally, this logic should not be here;
9982   instead, there should be some programmatic way of inquiring as
9983   to whether or not an object is a vtable.  */
9984
/* Nonzero if NAME names a vtable or vtable-like object: cfront-style
   "_vt." or the Itanium-ABI mangled prefixes _ZTV (vtable), _ZTT
   (VTT), _ZTC (construction vtable).

   Fixed to test the macro argument NAME (properly parenthesized)
   instead of textually referencing a variable called `name' at the
   expansion site, which silently captured the caller's local.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
9990
9991void
9992rs6000_output_symbol_ref (file, x)
9993     FILE *file;
9994     rtx x;
9995{
9996  /* Currently C++ toc references to vtables can be emitted before it
9997     is decided whether the vtable is public or private.  If this is
9998     the case, then the linker will eventually complain that there is
9999     a reference to an unknown section.  Thus, for vtables only,
10000     we emit the TOC reference to reference the symbol and not the
10001     section.  */
10002  const char *name = XSTR (x, 0);
10003
10004  if (VTABLE_NAME_P (name))
10005    {
10006      RS6000_OUTPUT_BASENAME (file, name);
10007    }
10008  else
10009    assemble_name (file, name);
10010}
10011
10012/* Output a TOC entry.  We derive the entry name from what is being
10013   written.  */
10014
/* Output a TOC entry for constant X in mode MODE to FILE, labelled
   with internal label number LABELNO.  The entry name is derived from
   what is being written.  Duplicate entries are commoned via
   toc_hash_table when the linker will not eliminate them itself.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the constant into low/high 32-bit halves.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within a Pmode-sized word.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* A CONST is (symbol-or-label + offset); split it so the entry name
     can encode the offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a nonzero offset into the entry name as .N<n>/.P<n>.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
10268
/* Emit to FILE the assembly needed to represent the N bytes starting
   at P as ASCII data.

   The RS/6000 assembler provides only a .byte directive, so runs of
   printable characters are emitted as quoted strings and all other
   bytes as decimal values.  A '"' inside a quoted run is doubled.
   Because the assembler truncates very long strings, any quoted run
   is closed and restarted once it reaches 512 characters.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* What the previous byte left open: nothing, a quoted string, or a
     comma-separated list of decimal bytes.  */
  enum { AT_START, IN_STRING, IN_DECIMAL } state = AT_START;
  int run_len = 0;		/* characters emitted in the current quoted run */
  int i;

  for (i = 0; i < n; i++)
    {
      char c = p[i];

      if (c >= ' ' && c < 0177)
	{
	  /* Printable: open (or continue) a quoted string.  */
	  if (state == AT_START)
	    fputs ("\t.byte \"", file);
	  else if (state == IN_DECIMAL)
	    fputs ("\n\t.byte \"", file);

	  putc (c, file);
	  run_len++;

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      run_len++;
	    }

	  state = IN_STRING;

	  /* The assembler mishandles very long strings, so break the
	     run artificially well before that point.  */
	  if (run_len >= 512)
	    {
	      fputs ("\"\n", file);
	      state = AT_START;
	      run_len = 0;
	    }
	}
      else
	{
	  /* Unprintable: emit the byte value in decimal.  */
	  if (state == AT_START)
	    fputs ("\t.byte ", file);
	  else if (state == IN_STRING)
	    fputs ("\"\n\t.byte ", file);
	  else
	    fputs (", ", file);

	  fprintf (file, "%d", c);
	  state = IN_DECIMAL;
	  run_len = 0;
	}
    }

  /* Close any open string, then end the final line.  */
  if (state == IN_STRING)
    fputs ("\"\n", file);
  else if (state == IN_DECIMAL)
    putc ('\n', file);
}
10338
/* Generate in *BUF a section name for FILENAME, unique per
   SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc: an underscore, then
   the basename of FILENAME with its last period replaced by
   SECTION_DESC (SECTION_DESC is appended instead when FILENAME
   contains no period).  Non-alphanumeric characters other than that
   period are dropped.  The buffer is allocated with permalloc.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *base = filename;
  const char *dot = 0;
  const char *src;
  char *dst;

  /* Locate the basename and the last period anywhere in FILENAME.  */
  for (src = filename; *src; src++)
    {
      if (*src == '/')
	base = src + 1;
      else if (*src == '.')
	dot = src;
    }

  *buf = (char *) permalloc (strlen (base) + strlen (section_desc) + 2);

  dst = *buf;
  *dst++ = '_';

  for (src = base; *src; src++)
    {
      if (src == dot)
	{
	  /* Splice the section description in at the period.  */
	  strcpy (dst, section_desc);
	  dst += strlen (section_desc);
	}
      else if (ISALNUM (*src))
	*dst++ = *src;
    }

  if (dot == 0)
    strcpy (dst, section_desc);
  else
    *dst = '\0';
}
10393
/* Emit RTL that calls the profiling routine (RS6000_MCOUNT) for the
   current function.  LABELNO numbers the per-function "LP" label; it
   is unused when NO_PROFILE_COUNTERS is defined.  Only the AIX and
   Darwin ABIs emit the call here; the other ABIs emit theirs in
   output_function_profiler.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of the "LPn" label as the single argument
	 to the mcount routine.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* The mcount routine receives the caller's address as its
	 single argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10440
/* Write function profiler code to FILE: save the link register on
   the stack, load the address of the "LPn" label for LABELNO into
   r0, and branch to RS6000_MCOUNT.  For the AIX and Darwin ABIs this
   is a no-op; output_profile_hook emits the call there instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;		/* stack offset at which LR is stored */

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: load the label's address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label's address with a
	     high/low-half pair through r12.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
10525
10526/* Adjust the cost of a scheduling dependency.  Return the new cost of
10527   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
10528
10529static int
10530rs6000_adjust_cost (insn, link, dep_insn, cost)
10531     rtx insn;
10532     rtx link;
10533     rtx dep_insn ATTRIBUTE_UNUSED;
10534     int cost;
10535{
10536  if (! recog_memoized (insn))
10537    return 0;
10538
10539  if (REG_NOTE_KIND (link) != 0)
10540    return 0;
10541
10542  if (REG_NOTE_KIND (link) == 0)
10543    {
10544      /* Data dependency; DEP_INSN writes a register that INSN reads
10545	 some cycles later.  */
10546      switch (get_attr_type (insn))
10547	{
10548	case TYPE_JMPREG:
10549          /* Tell the first scheduling pass about the latency between
10550	     a mtctr and bctr (and mtlr and br/blr).  The first
10551	     scheduling pass will not know about this latency since
10552	     the mtctr instruction, which has the latency associated
10553	     to it, will be generated by reload.  */
10554          return TARGET_POWER ? 5 : 4;
10555	case TYPE_BRANCH:
10556	  /* Leave some extra cycles between a compare and its
10557	     dependent branch, to inhibit expensive mispredicts.  */
10558	  if ((rs6000_cpu_attr == CPU_PPC750
10559               || rs6000_cpu_attr == CPU_PPC7400
10560               || rs6000_cpu_attr == CPU_PPC7450)
10561	      && recog_memoized (dep_insn)
10562	      && (INSN_CODE (dep_insn) >= 0)
10563	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10564		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10565		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10566		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10567	    return cost + 2;
10568	default:
10569	  break;
10570	}
10571      /* Fall out to return default cost.  */
10572    }
10573
10574  return cost;
10575}
10576
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE: the adjustment below is disabled, so as it stands this
     hook always returns PRIORITY unchanged.  The debugging fprintf
     would have to be removed before enabling it.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
10620
10621/* Return how many instructions the machine can issue per cycle.  */
10622
10623static int
10624rs6000_issue_rate ()
10625{
10626  switch (rs6000_cpu_attr) {
10627  case CPU_RIOS1:  /* ? */
10628  case CPU_RS64A:
10629  case CPU_PPC601: /* ? */
10630  case CPU_PPC7450:
10631    return 3;
10632  case CPU_PPC603:
10633  case CPU_PPC750:
10634  case CPU_PPC7400:
10635    return 2;
10636  case CPU_RIOS2:
10637  case CPU_PPC604:
10638  case CPU_PPC604E:
10639  case CPU_PPC620:
10640  case CPU_PPC630:
10641    return 4;
10642  default:
10643    return 1;
10644  }
10645}
10646
10647
10648/* Length in units of the trampoline for entering a nested function.  */
10649
10650int
10651rs6000_trampoline_size ()
10652{
10653  int ret = 0;
10654
10655  switch (DEFAULT_ABI)
10656    {
10657    default:
10658      abort ();
10659
10660    case ABI_AIX:
10661      ret = (TARGET_32BIT) ? 12 : 24;
10662      break;
10663
10664    case ABI_DARWIN:
10665    case ABI_V4:
10666    case ABI_AIX_NODESC:
10667      ret = (TARGET_32BIT) ? 40 : 48;
10668      break;
10669    }
10670
10671  return ret;
10672}
10673
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline being filled in.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* bytes per pointer-sized slot */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the entry-point and TOC words from FNADDR's
	   descriptor, then store the static chain in the third
	   slot.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10726
10727
/* Table of valid machine attributes.  Only "longcall" is recognized
   by this port; it is validated by rs6000_handle_longcall_attribute.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Sentinel entry terminating the table.  */
  { NULL,       0, 0, false, false, false, NULL }
};
10736
10737/* Handle a "longcall" attribute; arguments as in struct
10738   attribute_spec.handler.  */
10739
10740static tree
10741rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10742     tree *node;
10743     tree name;
10744     tree args ATTRIBUTE_UNUSED;
10745     int flags ATTRIBUTE_UNUSED;
10746     bool *no_add_attrs;
10747{
10748  if (TREE_CODE (*node) != FUNCTION_TYPE
10749      && TREE_CODE (*node) != FIELD_DECL
10750      && TREE_CODE (*node) != TYPE_DECL)
10751    {
10752      warning ("`%s' attribute only applies to functions",
10753	       IDENTIFIER_POINTER (name));
10754      *no_add_attrs = true;
10755    }
10756
10757  return NULL_TREE;
10758}
10759
10760/* Return a reference suitable for calling a function with the
10761   longcall attribute.  */
10762
10763struct rtx_def *
10764rs6000_longcall_ref (call_ref)
10765     rtx call_ref;
10766{
10767  const char *call_name;
10768  tree node;
10769
10770  if (GET_CODE (call_ref) != SYMBOL_REF)
10771    return call_ref;
10772
10773  /* System V adds '.' to the internal name, so skip them.  */
10774  call_name = XSTR (call_ref, 0);
10775  if (*call_name == '.')
10776    {
10777      while (*call_name == '.')
10778	call_name++;
10779
10780      node = get_identifier (call_name);
10781      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10782    }
10783
10784  return force_reg (Pmode, call_ref);
10785}
10786
10787
10788/* A C statement or statements to switch to the appropriate section
10789   for output of RTX in mode MODE.  You can assume that RTX is some
10790   kind of constant in RTL.  The argument MODE is redundant except in
10791   the case of a `const_int' rtx.  Select the section by calling
10792   `text_section' or one of the alternatives for other sections.
10793
10794   Do not define this macro if you put all constants in the read-only
10795   data section.  */
10796
10797#ifdef USING_ELFOS_H
10798
10799void
10800rs6000_select_rtx_section (mode, x)
10801     enum machine_mode mode;
10802     rtx x;
10803{
10804  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10805    toc_section ();
10806  else if (flag_pic
10807	   && (GET_CODE (x) == SYMBOL_REF
10808	       || GET_CODE (x) == LABEL_REF
10809	       || GET_CODE (x) == CONST))
10810    data_section ();
10811  else
10812    const_section ();
10813}
10814
10815/* A C statement or statements to switch to the appropriate
10816   section for output of DECL.  DECL is either a `VAR_DECL' node
10817   or a constant of some sort.  RELOC indicates whether forming
10818   the initial value of DECL requires link-time relocations.  */
10819
10820void
10821rs6000_select_section (decl, reloc)
10822     tree decl;
10823     int reloc;
10824{
10825  int size = int_size_in_bytes (TREE_TYPE (decl));
10826  int needs_sdata;
10827  int readonly;
10828  static void (* const sec_funcs[4]) PARAMS ((void)) = {
10829    &const_section,
10830    &sdata2_section,
10831    &data_section,
10832    &sdata_section
10833  };
10834
10835  needs_sdata = (size > 0
10836		 && size <= g_switch_value
10837		 && rs6000_sdata != SDATA_NONE
10838		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10839
10840  if (TREE_CODE (decl) == STRING_CST)
10841    readonly = ! flag_writable_strings;
10842  else if (TREE_CODE (decl) == VAR_DECL)
10843    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10844		&& TREE_READONLY (decl)
10845		&& ! TREE_SIDE_EFFECTS (decl)
10846		&& DECL_INITIAL (decl)
10847		&& DECL_INITIAL (decl) != error_mark_node
10848		&& TREE_CONSTANT (DECL_INITIAL (decl)));
10849  else if (TREE_CODE (decl) == CONSTRUCTOR)
10850    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10851		&& ! TREE_SIDE_EFFECTS (decl)
10852		&& TREE_CONSTANT (decl));
10853  else
10854    readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);
10855
10856  if (needs_sdata && rs6000_sdata != SDATA_EABI)
10857    readonly = 0;
10858
10859  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10860}
10861
10862/* A C statement to build up a unique section name, expressed as a
10863   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10864   RELOC indicates whether the initial value of EXP requires
10865   link-time relocations.  If you do not define this macro, GCC will use
10866   the symbol name prefixed by `.' as the section name.  Note - this
10867   macro can now be called for uninitialized data items as well as
10868   initialised data and functions.  */
10869
10870void
10871rs6000_unique_section (decl, reloc)
10872     tree decl;
10873     int reloc;
10874{
10875  int len;
10876  int sec;
10877  const char *name;
10878  char *string;
10879  const char *prefix;
10880
10881  static const char *const prefixes[7][2] =
10882  {
10883    { ".rodata.", ".gnu.linkonce.r." },
10884    { ".sdata2.", ".gnu.linkonce.s2." },
10885    { ".data.",   ".gnu.linkonce.d." },
10886    { ".sdata.",  ".gnu.linkonce.s." },
10887    { ".bss.",    ".gnu.linkonce.b." },
10888    { ".sbss.",   ".gnu.linkonce.sb." },
10889    { ".text.",   ".gnu.linkonce.t." }
10890  };
10891
10892  if (TREE_CODE (decl) == FUNCTION_DECL)
10893    sec = 6;
10894  else
10895    {
10896      int readonly;
10897      int needs_sdata;
10898      int size;
10899
10900      if (TREE_CODE (decl) == STRING_CST)
10901	readonly = ! flag_writable_strings;
10902      else if (TREE_CODE (decl) == VAR_DECL)
10903	readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10904		    && TREE_READONLY (decl)
10905		    && ! TREE_SIDE_EFFECTS (decl)
10906		    && TREE_CONSTANT (DECL_INITIAL (decl)));
10907      else
10908	readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);
10909
10910      size = int_size_in_bytes (TREE_TYPE (decl));
10911      needs_sdata = (size > 0
10912		     && size <= g_switch_value
10913		     && rs6000_sdata != SDATA_NONE
10914		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10915
10916      if (DECL_INITIAL (decl) == 0
10917	  || DECL_INITIAL (decl) == error_mark_node)
10918	sec = 4;
10919      else if (! readonly)
10920	sec = 2;
10921      else
10922	sec = 0;
10923
10924      if (needs_sdata)
10925	{
10926	  /* .sdata2 is only for EABI.  */
10927	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
10928	    sec = 2;
10929	  sec += 1;
10930	}
10931    }
10932
10933  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10934  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10935  len    = strlen (name) + strlen (prefix);
10936  string = alloca (len + 1);
10937
10938  sprintf (string, "%s%s", prefix, name);
10939
10940  DECL_SECTION_NAME (decl) = build_string (len, string);
10941}
10942
10943
10944static bool
10945rs6000_binds_local_p (exp)
10946     tree exp;
10947{
10948  bool local_p;
10949  tree attr;
10950
10951  /* A non-decl is an entry in the constant pool.  */
10952  if (!DECL_P (exp))
10953    local_p = true;
10954  /* Static variables are always local.  */
10955  else if (! TREE_PUBLIC (exp))
10956    local_p = true;
10957  /* Otherwise, variables defined outside this object may not be local.  */
10958  else if (DECL_EXTERNAL (exp))
10959    local_p = false;
10960  /* Linkonce and weak data are never local.  */
10961  else if (DECL_ONE_ONLY (exp) || DECL_WEAK (exp))
10962    local_p = false;
10963  /* If PIC, then assume that any global name can be overridden by
10964   *      symbols resolved from other modules.  */
10965  else if (flag_pic || rs6000_flag_pic)
10966    local_p = false;
10967  /* Uninitialized COMMON variable may be unified with symbols
10968   *      resolved from other modules.  */
10969  else if (DECL_COMMON (exp)
10970	   && (DECL_INITIAL (exp) == NULL
10971	       || DECL_INITIAL (exp) == error_mark_node))
10972    local_p = false;
10973  /* Otherwise we're left with initialized (or non-common) global data
10974   *      which is of necessity defined locally.  */
10975  else
10976    local_p = true;
10977
10978  return local_p;
10979}
10980
10981
10982/* If we are referencing a function that is static or is known to be
10983   in this file, make the SYMBOL_REF special.  We can use this to indicate
10984   that we can branch to this function without emitting a no-op after the
10985   call.  For real AIX calling sequences, we also replace the
10986   function name with the real name (1 or 2 leading .'s), rather than
10987   the function descriptor name.  This saves a lot of overriding code
10988   to read the prefixes.  */
10989
10990void
10991rs6000_encode_section_info (decl)
10992     tree decl;
10993{
10994  if (TREE_CODE (decl) == FUNCTION_DECL)
10995    {
10996      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10997      if (rs6000_binds_local_p (decl))
10998	SYMBOL_REF_FLAG (sym_ref) = 1;
10999
11000      if (DEFAULT_ABI == ABI_AIX)
11001	{
11002	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
11003	  size_t len2 = strlen (XSTR (sym_ref, 0));
11004	  char *str = alloca (len1 + len2 + 1);
11005	  str[0] = '.';
11006	  str[1] = '.';
11007	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
11008
11009	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11010	}
11011    }
11012  else if (rs6000_sdata != SDATA_NONE
11013	   && DEFAULT_ABI == ABI_V4
11014	   && TREE_CODE (decl) == VAR_DECL)
11015    {
11016      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11017      int size = int_size_in_bytes (TREE_TYPE (decl));
11018      tree section_name = DECL_SECTION_NAME (decl);
11019      const char *name = (char *)0;
11020      int len = 0;
11021
11022      if (rs6000_binds_local_p (decl))
11023	SYMBOL_REF_FLAG (sym_ref) = 1;
11024
11025      if (section_name)
11026	{
11027	  if (TREE_CODE (section_name) == STRING_CST)
11028	    {
11029	      name = TREE_STRING_POINTER (section_name);
11030	      len = TREE_STRING_LENGTH (section_name);
11031	    }
11032	  else
11033	    abort ();
11034	}
11035
11036      if ((size > 0 && size <= g_switch_value)
11037	  || (name
11038	      && ((len == sizeof (".sdata") - 1
11039		   && strcmp (name, ".sdata") == 0)
11040		  || (len == sizeof (".sdata2") - 1
11041		      && strcmp (name, ".sdata2") == 0)
11042		  || (len == sizeof (".sbss") - 1
11043		      && strcmp (name, ".sbss") == 0)
11044		  || (len == sizeof (".sbss2") - 1
11045		      && strcmp (name, ".sbss2") == 0)
11046		  || (len == sizeof (".PPC.EMB.sdata0") - 1
11047		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
11048		  || (len == sizeof (".PPC.EMB.sbss0") - 1
11049		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11050	{
11051	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11052	  size_t len = strlen (XSTR (sym_ref, 0));
11053	  char *str = alloca (len + 2);
11054
11055	  str[0] = '@';
11056	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11057	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11058	}
11059    }
11060}
11061
11062#endif /* USING_ELFOS_H */
11063
11064
11065/* Return a REG that occurs in ADDR with coefficient 1.
11066   ADDR can be effectively incremented by incrementing REG.
11067
11068   r0 is special and we must not select it as an address
11069   register by this routine since our caller will try to
11070   increment the returned register via an "la" instruction.  */
11071
11072struct rtx_def *
11073find_addr_reg (addr)
11074     rtx addr;
11075{
11076  while (GET_CODE (addr) == PLUS)
11077    {
11078      if (GET_CODE (XEXP (addr, 0)) == REG
11079	  && REGNO (XEXP (addr, 0)) != 0)
11080	addr = XEXP (addr, 0);
11081      else if (GET_CODE (XEXP (addr, 1)) == REG
11082	       && REGNO (XEXP (addr, 1)) != 0)
11083	addr = XEXP (addr, 1);
11084      else if (CONSTANT_P (XEXP (addr, 0)))
11085	addr = XEXP (addr, 1);
11086      else if (CONSTANT_P (XEXP (addr, 1)))
11087	addr = XEXP (addr, 0);
11088      else
11089	abort ();
11090    }
11091  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11092    return addr;
11093  abort ();
11094}
11095
/* Report a fatal error for the invalid address OP via fatal_insn.  */

void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
11102
/* Called to register all of our global variables with the garbage
   collector.  */

static void
rs6000_add_gc_roots ()
{
  /* The pending compare operands must survive collection.  */
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* Create the TOC hash table and register it with a custom mark
     routine so its entries are traced.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
11120
11121#if TARGET_MACHO
11122
11123#if 0
11124/* Returns 1 if OP is either a symbol reference or a sum of a symbol
11125   reference and a constant.  */
11126
11127int
11128symbolic_operand (op)
11129     rtx op;
11130{
11131  switch (GET_CODE (op))
11132    {
11133    case SYMBOL_REF:
11134    case LABEL_REF:
11135      return 1;
11136    case CONST:
11137      op = XEXP (op, 0);
11138      return (GET_CODE (op) == SYMBOL_REF ||
11139	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11140	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11141	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
11142    default:
11143      return 0;
11144    }
11145}
11146#endif
11147
11148#ifdef RS6000_LONG_BRANCH
11149
/* Chain of compiler-generated long-branch stubs waiting to be
   emitted; see add_compiler_stub and output_compiler_stub.  */
static tree stub_list = 0;
11151
11152/* ADD_COMPILER_STUB adds the compiler generated stub for handling
11153   procedure calls to the linked list.  */
11154
11155void
11156add_compiler_stub (label_name, function_name, line_number)
11157     tree label_name;
11158     tree function_name;
11159     int line_number;
11160{
11161  tree stub = build_tree_list (function_name, label_name);
11162  TREE_TYPE (stub) = build_int_2 (line_number, 0);
11163  TREE_CHAIN (stub) = stub_list;
11164  stub_list = stub;
11165}
11166
11167#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
11168#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
11169#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
11170
11171/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11172   handling procedure calls from the linked list and initializes the
11173   linked list.  */
11174
11175void
11176output_compiler_stub ()
11177{
11178  char tmp_buf[256];
11179  char label_buf[256];
11180  char *label;
11181  tree tmp_stub, stub;
11182
11183  if (!flag_pic)
11184    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11185      {
11186	fprintf (asm_out_file,
11187		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11188
11189#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11190	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11191	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11192#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11193
11194	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11195	  strcpy (label_buf,
11196		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11197	else
11198	  {
11199	    label_buf[0] = '_';
11200	    strcpy (label_buf+1,
11201		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11202	  }
11203
11204	strcpy (tmp_buf, "lis r12,hi16(");
11205	strcat (tmp_buf, label_buf);
11206	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11207	strcat (tmp_buf, label_buf);
11208	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11209	output_asm_insn (tmp_buf, 0);
11210
11211#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11212	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11213	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11214#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11215      }
11216
11217  stub_list = 0;
11218}
11219
11220/* NO_PREVIOUS_DEF checks in the link list whether the function name is
11221   already there or not.  */
11222
11223int
11224no_previous_def (function_name)
11225     tree function_name;
11226{
11227  tree stub;
11228  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11229    if (function_name == STUB_FUNCTION_NAME (stub))
11230      return 0;
11231  return 1;
11232}
11233
11234/* GET_PREV_LABEL gets the label name from the previous definition of
11235   the function.  */
11236
11237tree
11238get_prev_label (function_name)
11239     tree function_name;
11240{
11241  tree stub;
11242  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11243    if (function_name == STUB_FUNCTION_NAME (stub))
11244      return STUB_LABEL_NAME (stub);
11245  return 0;
11246}
11247
11248/* INSN is either a function call or a millicode call.  It may have an
11249   unconditional jump in its delay slot.
11250
11251   CALL_DEST is the routine we are calling.  */
11252
11253char *
11254output_call (insn, call_dest, operand_number)
11255     rtx insn;
11256     rtx call_dest;
11257     int operand_number;
11258{
11259  static char buf[256];
11260  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11261    {
11262      tree labelname;
11263      tree funname = get_identifier (XSTR (call_dest, 0));
11264
11265      if (no_previous_def (funname))
11266	{
11267	  int line_number;
11268	  rtx label_rtx = gen_label_rtx ();
11269	  char *label_buf, temp_buf[256];
11270	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11271				       CODE_LABEL_NUMBER (label_rtx));
11272	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11273	  labelname = get_identifier (label_buf);
11274	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11275	  if (insn)
11276	    line_number = NOTE_LINE_NUMBER (insn);
11277	  add_compiler_stub (labelname, funname, line_number);
11278	}
11279      else
11280	labelname = get_prev_label (funname);
11281
11282      sprintf (buf, "jbsr %%z%d,%.246s",
11283	       operand_number, IDENTIFIER_POINTER (labelname));
11284      return buf;
11285    }
11286  else
11287    {
11288      sprintf (buf, "bl %%z%d", operand_number);
11289      return buf;
11290    }
11291}
11292
11293#endif /* RS6000_LONG_BRANCH */
11294
/* Write into BUF the local label "L<N>$<SYMBOL>" for SYMBOL.  If
   SYMBOL is already quoted (leading '"'), the label is spliced inside
   the existing quotes; if SYMBOL merely needs quoting, the whole label
   is wrapped in quotes; otherwise it is emitted bare.
   NOTE(review): the LENGTH argument is not used in the expansion --
   presumably kept for interface parity with the sibling GEN_*_FOR_SYMBOL
   macros; confirm before removing.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
11312
11313
11314/* Generate PIC and indirect symbol stubs.  */
11315
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Per-translation-unit counter, bumped once per stub emitted.
     NOTE(review): after the increment below it is not referenced in
     this function -- presumably consumed via the GEN_* macros'
     encodings in an earlier revision; confirm it is still needed.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* Build the three names used below: the indirect symbol, its lazy
     pointer, and a local label for PC-relative addressing.  Each
     buffer gets 32 bytes of slack for the decoration the GEN_*
     macros add.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  /* flag_pic == 2 selects full PIC; choose the matching stub section.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: use "bcl 20,31" to get the current PC into LR, read
	 it back into r11, then load the lazy pointer at a PC-relative
	 offset and jump through CTR.  LR is saved in r0 and restored
	 so the eventual callee sees the caller's return address.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself; it initially resolves through
     dyld_stub_binding_helper and is patched by dyld on first use.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
11372
11373/* Legitimize PIC addresses.  If the address is already
11374   position-independent, we return ORIG.  Newly generated
11375   position-independent addresses go into a reg.  This is REG if non
11376   zero, otherwise we allocate register(s) as necessary.  */
11377
11378#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11379
11380rtx
11381rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11382     rtx orig;
11383     enum machine_mode mode;
11384     rtx reg;
11385{
11386  rtx base, offset;
11387
11388  if (reg == NULL && ! reload_in_progress && ! reload_completed)
11389    reg = gen_reg_rtx (Pmode);
11390
11391  if (GET_CODE (orig) == CONST)
11392    {
11393      if (GET_CODE (XEXP (orig, 0)) == PLUS
11394	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11395	return orig;
11396
11397      if (GET_CODE (XEXP (orig, 0)) == PLUS)
11398	{
11399	  base =
11400	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11401						    Pmode, reg);
11402	  offset =
11403	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11404						    Pmode, reg);
11405	}
11406      else
11407	abort ();
11408
11409      if (GET_CODE (offset) == CONST_INT)
11410	{
11411	  if (SMALL_INT (offset))
11412	    return plus_constant (base, INTVAL (offset));
11413	  else if (! reload_in_progress && ! reload_completed)
11414	    offset = force_reg (Pmode, offset);
11415	  else
11416	    {
11417 	      rtx mem = force_const_mem (Pmode, orig);
11418	      return machopic_legitimize_pic_address (mem, Pmode, reg);
11419	    }
11420	}
11421      return gen_rtx (PLUS, Pmode, base, offset);
11422    }
11423
11424  /* Fall back on generic machopic code.  */
11425  return machopic_legitimize_pic_address (orig, mode, reg);
11426}
11427
11428/* This is just a placeholder to make linking work without having to
11429   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
11430   ever needed for Darwin (not too likely!) this would have to get a
11431   real definition.  */
11432
void
toc_section ()
{
  /* Deliberately empty: Darwin never uses a TOC section, this stub
     exists only to satisfy the linker (see comment above).  */
}
11437
11438#endif /* TARGET_MACHO */
11439
11440#if TARGET_ELF
11441static unsigned int
11442rs6000_elf_section_type_flags (decl, name, reloc)
11443     tree decl;
11444     const char *name;
11445     int reloc;
11446{
11447  unsigned int flags = default_section_type_flags (decl, name, reloc);
11448
11449  if (TARGET_RELOCATABLE)
11450    flags |= SECTION_WRITE;
11451
11452  return flags;
11453}
11454
11455/* Record an element in the table of global constructors.  SYMBOL is
11456   a SYMBOL_REF of the function to be called; PRIORITY is a number
11457   between 0 and MAX_INIT_PRIORITY.
11458
11459   This differs from default_named_section_asm_out_constructor in
11460   that we have special handling for -mrelocatable.  */
11461
11462static void
11463rs6000_elf_asm_out_constructor (symbol, priority)
11464     rtx symbol;
11465     int priority;
11466{
11467  const char *section = ".ctors";
11468  char buf[16];
11469
11470  if (priority != DEFAULT_INIT_PRIORITY)
11471    {
11472      sprintf (buf, ".ctors.%.5u",
11473               /* Invert the numbering so the linker puts us in the proper
11474                  order; constructors are run from right to left, and the
11475                  linker sorts in increasing order.  */
11476               MAX_INIT_PRIORITY - priority);
11477      section = buf;
11478    }
11479
11480  named_section_flags (section, SECTION_WRITE);
11481  assemble_align (POINTER_SIZE);
11482
11483  if (TARGET_RELOCATABLE)
11484    {
11485      fputs ("\t.long (", asm_out_file);
11486      output_addr_const (asm_out_file, symbol);
11487      fputs (")@fixup\n", asm_out_file);
11488    }
11489  else
11490    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11491}
11492
11493static void
11494rs6000_elf_asm_out_destructor (symbol, priority)
11495     rtx symbol;
11496     int priority;
11497{
11498  const char *section = ".dtors";
11499  char buf[16];
11500
11501  if (priority != DEFAULT_INIT_PRIORITY)
11502    {
11503      sprintf (buf, ".dtors.%.5u",
11504               /* Invert the numbering so the linker puts us in the proper
11505                  order; constructors are run from right to left, and the
11506                  linker sorts in increasing order.  */
11507               MAX_INIT_PRIORITY - priority);
11508      section = buf;
11509    }
11510
11511  named_section_flags (section, SECTION_WRITE);
11512  assemble_align (POINTER_SIZE);
11513
11514  if (TARGET_RELOCATABLE)
11515    {
11516      fputs ("\t.long (", asm_out_file);
11517      output_addr_const (asm_out_file, symbol);
11518      fputs (")@fixup\n", asm_out_file);
11519    }
11520  else
11521    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11522}
11523#endif
11524
11525#ifdef OBJECT_FORMAT_COFF
11526static void
11527xcoff_asm_named_section (name, flags)
11528     const char *name;
11529     unsigned int flags ATTRIBUTE_UNUSED;
11530{
11531  fprintf (asm_out_file, "\t.csect %s\n", name);
11532}
11533#endif
11534