/* rs6000.c revision 103445 */
/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
/* Subtargets may predefine TARGET_NO_PROTOTYPE; default it to 0
   (prototypes assumed available).  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Local minimum/maximum helpers.  Arguments are fully parenthesized
   but may be evaluated twice, so avoid side effects in A and B.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
/* Target cpu type.  */

enum processor_type rs6000_cpu;

/* Option-name table scanned by rs6000_override_options; entry 0 is
   filled in with the configure-time default cpu.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double: the raw -mlong-double- argument string and the
   resulting type size in bits (64 or 128).  */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section.  */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to.  */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area.  */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use (-msdata= argument string).  */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags (-mdebug= argument string and decoded booleans).  */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Flag to say the TOC is initialized, and the buffer holding the
   generated TOC label name.  */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Forward declarations for static helpers defined later in this file.  */
static void rs6000_add_gc_roots PARAMS ((void));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int toc_hash_mark_entry PARAMS ((void **, void *));
static void toc_hash_mark_table PARAMS ((void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static void rs6000_free_machine_status PARAMS ((struct function *));
static void rs6000_init_machine_status PARAMS ((struct function *));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
  HOST_WIDE_INT, HOST_WIDE_INT));
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
#endif
#ifdef OBJECT_FORMAT_COFF
static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
#endif
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

static void rs6000_init_builtins PARAMS ((void));
static void altivec_init_builtins PARAMS ((void));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static rtx altivec_expand_builtin PARAMS ((tree, rtx));
static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
static void rs6000_parse_abi_options PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
int vrsave_operation PARAMS ((rtx, enum machine_mode));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
static int easy_vector_constant PARAMS ((rtx));
/* Default register names.  Order must match the hard register layout:
   32 GPRs, 32 FPRs, the special registers, 8 CR fields, xer, then the
   32 AltiVec registers and vrsave.  */
char rs6000_reg_names[][8] =
{
     /* GPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     /* FPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     /* Special registers: MQ, link, count, argument pointer.  */
     "mq", "lr", "ctr","ap",
     /* Condition register fields 0-7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
199
#ifdef TARGET_REGNAMES
/* Alternate ("%-prefixed") register names, copied over
   rs6000_reg_names when -mregnames is in effect.  Layout must match
   rs6000_reg_names exactly.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9",  "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
   "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
   "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
   "vrsave"
};
#endif
222
/* Subtargets that have no strict-alignment mask define it as 0 here.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#ifdef OBJECT_FORMAT_COFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#if TARGET_ELF
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS  rs6000_elf_section_type_flags
#endif

/* Scheduler hooks.  */
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The single target-hook vtable for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
287
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default processor name (may be
   NULL).  Called once, after all switches have been parsed.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each recognized -mcpu=/-mtune= name to its
     processor enum and the target flags to force on/off for it.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk --with-cpu/-mcpu/-mtune in order; later entries override
     earlier ones for the fields they set.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default.  */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* On the AIX ABI all code is position independent already, so -fpic
     and -fPIC are meaningless; silently drop them (warn only with
     -Wextra).  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      flag_pic = 0;

      if (extra_warnings)
	warning ("-f%s ignored (all code is position independent)",
		 (flag_pic > 1) ? "PIC" : "pic");
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* Set debug flags from the -mdebug= argument string.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Set size of long double; only 64 and 128 bits are accepted.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
  free_machine_status = rs6000_free_machine_status;
}
584
585/* Handle -mabi= options.  */
586static void
587rs6000_parse_abi_options ()
588{
589  if (rs6000_abi_string == 0)
590    return;
591  else if (! strcmp (rs6000_abi_string, "altivec"))
592    rs6000_altivec_abi = 1;
593  else if (! strcmp (rs6000_abi_string, "no-altivec"))
594    rs6000_altivec_abi = 0;
595  else
596    error ("unknown ABI specified: '%s'", rs6000_abi_string);
597}
598
/* Hook called by toplev.c to let the target adjust options for a given
   optimization LEVEL and -Os flag SIZE.  The rs6000 port needs no
   per-level adjustments, so this is intentionally empty.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
605
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit a comment line listing the rs6000/powerpc
   options in effect.  FILE is the assembly output stream; DEFAULT_CPU
   is the configure-time default processor name.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START points at the comment-leader prefix for the first item
	 printed, and is reset to "" once anything has been emitted.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* *START == '\0' means at least one option was printed above, so
	 terminate the comment line.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
653
654
655/* Create a CONST_DOUBLE from a string.  */
656
657struct rtx_def *
658rs6000_float_const (string, mode)
659     const char *string;
660     enum machine_mode mode;
661{
662  REAL_VALUE_TYPE value;
663  value = REAL_VALUE_ATOF (string, mode);
664  return immed_real_const_1 (value, mode);
665}
666
667/* Return non-zero if this function is known to have a null epilogue.  */
668
669int
670direct_return ()
671{
672  if (reload_completed)
673    {
674      rs6000_stack_t *info = rs6000_stack_info ();
675
676      if (info->first_gp_reg_save == 32
677	  && info->first_fp_reg_save == 64
678	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
679	  && ! info->lr_save_p
680	  && ! info->cr_save_p
681	  && info->vrsave_mask == 0
682	  && ! info->push_p)
683	return 1;
684    }
685
686  return 0;
687}
688
689/* Returns 1 always.  */
690
691int
692any_operand (op, mode)
693     rtx op ATTRIBUTE_UNUSED;
694     enum machine_mode mode ATTRIBUTE_UNUSED;
695{
696  return 1;
697}
698
699/* Returns 1 if op is the count register.  */
700int
701count_register_operand (op, mode)
702     rtx op;
703     enum machine_mode mode ATTRIBUTE_UNUSED;
704{
705  if (GET_CODE (op) != REG)
706    return 0;
707
708  if (REGNO (op) == COUNT_REGISTER_REGNUM)
709    return 1;
710
711  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
712    return 1;
713
714  return 0;
715}
716
717/* Returns 1 if op is an altivec register.  */
718int
719altivec_register_operand (op, mode)
720     rtx op;
721     enum machine_mode mode ATTRIBUTE_UNUSED;
722{
723
724  return (register_operand (op, mode)
725	  && (GET_CODE (op) != REG
726	      || REGNO (op) > FIRST_PSEUDO_REGISTER
727	      || ALTIVEC_REGNO_P (REGNO (op))));
728}
729
730int
731xer_operand (op, mode)
732     rtx op;
733     enum machine_mode mode ATTRIBUTE_UNUSED;
734{
735  if (GET_CODE (op) != REG)
736    return 0;
737
738  if (XER_REGNO_P (REGNO (op)))
739    return 1;
740
741  return 0;
742}
743
744/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
745   by such constants completes more quickly.  */
746
747int
748s8bit_cint_operand (op, mode)
749     rtx op;
750     enum machine_mode mode ATTRIBUTE_UNUSED;
751{
752  return ( GET_CODE (op) == CONST_INT
753	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
754}
755
756/* Return 1 if OP is a constant that can fit in a D field.  */
757
758int
759short_cint_operand (op, mode)
760     rtx op;
761     enum machine_mode mode ATTRIBUTE_UNUSED;
762{
763  return (GET_CODE (op) == CONST_INT
764	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
765}
766
767/* Similar for an unsigned D field.  */
768
769int
770u_short_cint_operand (op, mode)
771     rtx op;
772     enum machine_mode mode ATTRIBUTE_UNUSED;
773{
774  return (GET_CODE (op) == CONST_INT
775	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
776}
777
778/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
779
780int
781non_short_cint_operand (op, mode)
782     rtx op;
783     enum machine_mode mode ATTRIBUTE_UNUSED;
784{
785  return (GET_CODE (op) == CONST_INT
786	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
787}
788
789/* Returns 1 if OP is a CONST_INT that is a positive value
790   and an exact power of 2.  */
791
792int
793exact_log2_cint_operand (op, mode)
794     rtx op;
795     enum machine_mode mode ATTRIBUTE_UNUSED;
796{
797  return (GET_CODE (op) == CONST_INT
798	  && INTVAL (op) > 0
799	  && exact_log2 (INTVAL (op)) >= 0);
800}
801
802/* Returns 1 if OP is a register that is not special (i.e., not MQ,
803   ctr, or lr).  */
804
805int
806gpc_reg_operand (op, mode)
807     rtx op;
808     enum machine_mode mode;
809{
810  return (register_operand (op, mode)
811	  && (GET_CODE (op) != REG
812	      || (REGNO (op) >= ARG_POINTER_REGNUM
813		  && !XER_REGNO_P (REGNO (op)))
814	      || REGNO (op) < MQ_REGNO));
815}
816
817/* Returns 1 if OP is either a pseudo-register or a register denoting a
818   CR field.  */
819
820int
821cc_reg_operand (op, mode)
822     rtx op;
823     enum machine_mode mode;
824{
825  return (register_operand (op, mode)
826	  && (GET_CODE (op) != REG
827	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
828	      || CR_REGNO_P (REGNO (op))));
829}
830
831/* Returns 1 if OP is either a pseudo-register or a register denoting a
832   CR field that isn't CR0.  */
833
834int
835cc_reg_not_cr0_operand (op, mode)
836     rtx op;
837     enum machine_mode mode;
838{
839  return (register_operand (op, mode)
840	  && (GET_CODE (op) != REG
841	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
842	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
843}
844
845/* Returns 1 if OP is either a constant integer valid for a D-field or
846   a non-special register.  If a register, it must be in the proper
847   mode unless MODE is VOIDmode.  */
848
849int
850reg_or_short_operand (op, mode)
851      rtx op;
852      enum machine_mode mode;
853{
854  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
855}
856
857/* Similar, except check if the negation of the constant would be
858   valid for a D-field.  */
859
860int
861reg_or_neg_short_operand (op, mode)
862      rtx op;
863      enum machine_mode mode;
864{
865  if (GET_CODE (op) == CONST_INT)
866    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867
868  return gpc_reg_operand (op, mode);
869}
870
871/* Returns 1 if OP is either a constant integer valid for a DS-field or
872   a non-special register.  If a register, it must be in the proper
873   mode unless MODE is VOIDmode.  */
874
875int
876reg_or_aligned_short_operand (op, mode)
877      rtx op;
878      enum machine_mode mode;
879{
880  if (gpc_reg_operand (op, mode))
881    return 1;
882  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
883    return 1;
884
885  return 0;
886}
887
888
889/* Return 1 if the operand is either a register or an integer whose
890   high-order 16 bits are zero.  */
891
892int
893reg_or_u_short_operand (op, mode)
894     rtx op;
895     enum machine_mode mode;
896{
897  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
898}
899
900/* Return 1 is the operand is either a non-special register or ANY
901   constant integer.  */
902
903int
904reg_or_cint_operand (op, mode)
905    rtx op;
906    enum machine_mode mode;
907{
908  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
909}
910
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  On 32-bit hosts every CONST_INT
   already fits, so the range check is compiled out.  */

int
reg_or_arith_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* Bias by 0x80000000 so [-2^31, 2^31) maps onto
		 [0, 2^32).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
927
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition (i.e. both its low
   and high 16-bit halves can be materialized by addi/addis).  */

int
reg_or_add_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* 0x7fff8000 is the largest value reachable as
		 addis (high) + non-negative addi (low).  */
	      && INTVAL (op) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
945
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  /* Same range check as reg_or_add_cint64_operand, applied to the
     negated constant (the subtraction is performed by adding the
     negation).  NOTE(review): -INTVAL overflows for the most negative
     HOST_WIDE_INT; presumably such constants never reach here --
     confirm.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
963
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT never holds more bits than the host word, so
	     MODE must itself be wider than 32 bits to get here.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT denotes a sign-extended value, i.e.
	     bits set above bit 31 -- not a 32-bit unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept the value when no bits above the low 32 are set
	 within the mode's mask.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only arises when the value is wider
	 than a host word; only DImode is expected here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
997
998/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
999
1000int
1001got_operand (op, mode)
1002     rtx op;
1003     enum machine_mode mode ATTRIBUTE_UNUSED;
1004{
1005  return (GET_CODE (op) == SYMBOL_REF
1006	  || GET_CODE (op) == CONST
1007	  || GET_CODE (op) == LABEL_REF);
1008}
1009
1010/* Return 1 if the operand is a simple references that can be loaded via
1011   the GOT (labels involving addition aren't allowed).  */
1012
1013int
1014got_no_const_operand (op, mode)
1015     rtx op;
1016     enum machine_mode mode ATTRIBUTE_UNUSED;
1017{
1018  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1019}
1020
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split into the two 32-bit halves.  */
      HOST_WIDE_INT low  = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;  /* sign extend */

      /* If the high half is merely the sign extension of the low
	 half, two instructions suffice.  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Otherwise form the high half and shift it into place (+1),
	 then OR in the low half if it is nonzero.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1062
/* Return the number of instructions it takes to form the constant OP
   (a CONST_INT or CONST_DOUBLE) in an integer register; abort on any
   other code.  MODE selects how a CONST_DOUBLE is interpreted.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value that doesn't fit in 32 bits but is a valid 64-bit
	 mask can always be formed in two instructions.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Cost of loading the 32-bit image of the float.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integral CONST_DOUBLEs carry the halves directly; floating
	 ones are converted to their target image first.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit: if the high word is just the sign extension of
	     the low word, only the low word need be formed.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1137
1138/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1139   register with one instruction per word.  We only do this if we can
1140   safely read CONST_DOUBLE_{LOW,HIGH}.  */
1141
1142int
1143easy_fp_constant (op, mode)
1144     rtx op;
1145     enum machine_mode mode;
1146{
1147  if (GET_CODE (op) != CONST_DOUBLE
1148      || GET_MODE (op) != mode
1149      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1150    return 0;
1151
1152  /* Consider all constants with -msoft-float to be easy.  */
1153  if (TARGET_SOFT_FLOAT && mode != DImode)
1154    return 1;
1155
1156  /* If we are using V.4 style PIC, consider all constants to be hard.  */
1157  if (flag_pic && DEFAULT_ABI == ABI_V4)
1158    return 0;
1159
1160#ifdef TARGET_RELOCATABLE
1161  /* Similarly if we are using -mrelocatable, consider all constants
1162     to be hard.  */
1163  if (TARGET_RELOCATABLE)
1164    return 0;
1165#endif
1166
1167  if (mode == DFmode)
1168    {
1169      long k[2];
1170      REAL_VALUE_TYPE rv;
1171
1172      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1173      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1174
1175      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1176	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1177    }
1178
1179  else if (mode == SFmode)
1180    {
1181      long l;
1182      REAL_VALUE_TYPE rv;
1183
1184      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1185      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1186
1187      return num_insns_constant_wide (l) == 1;
1188    }
1189
1190  else if (mode == DImode)
1191    return ((TARGET_POWERPC64
1192	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1193	    || (num_insns_constant (op, DImode) <= 2));
1194
1195  else if (mode == SImode)
1196    return 1;
1197  else
1198    abort ();
1199}
1200
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register cheaply.  Currently only the all-zeros vector qualifies.  */

static int
easy_vector_constant (op)
     rtx op;
{
  rtx elt;
  int units, i;

  if (GET_CODE (op) != CONST_VECTOR)
    return 0;

  units = CONST_VECTOR_NUNITS (op);

  /* We can generate 0 easily.  Look for that.  */
  for (i = 0; i < units; ++i)
    {
      elt = CONST_VECTOR_ELT (op, i);

      /* We could probably simplify this by just checking for equality
	 with CONST0_RTX for the current mode, but let's be safe
	 instead.  */

      switch (GET_CODE (elt))
	{
	case CONST_INT:
	  if (INTVAL (elt) != 0)
	    return 0;
	  break;
	case CONST_DOUBLE:
	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
	    return 0;
	  break;
	default:
	  /* Any non-constant element makes the vector hard.  */
	  return 0;
	}
    }

  /* We could probably generate a few other constants trivially, but
     gcc doesn't generate them yet.  FIXME later.  */
  return 1;
}
1244
1245/* Return 1 if the operand is the constant 0.  This works for scalars
1246   as well as vectors.  */
1247int
1248zero_constant (op, mode)
1249     rtx op;
1250     enum machine_mode mode;
1251{
1252  return op == CONST0_RTX (mode);
1253}
1254
1255/* Return 1 if the operand is 0.0.  */
1256int
1257zero_fp_constant (op, mode)
1258     rtx op;
1259     enum machine_mode mode;
1260{
1261  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1262}
1263
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where it's safe.  */

int
volatile_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload, fall back on the generic predicate.  */
  if (reload_completed)
    return memory_operand (op, mode);

  /* During reload, only strictly legitimate addresses are usable.  */
  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  /* Before reload, accept any legitimate (non-strict) address.  */
  return memory_address_p (mode, XEXP (op, 0));
}
1291
1292/* Return 1 if the operand is an offsettable memory operand.  */
1293
1294int
1295offsettable_mem_operand (op, mode)
1296     rtx op;
1297     enum machine_mode mode;
1298{
1299  return ((GET_CODE (op) == MEM)
1300	  && offsettable_address_p (reload_completed || reload_in_progress,
1301				    mode, XEXP (op, 0)));
1302}
1303
1304/* Return 1 if the operand is either an easy FP constant (see above) or
1305   memory.  */
1306
1307int
1308mem_or_easy_const_operand (op, mode)
1309     rtx op;
1310     enum machine_mode mode;
1311{
1312  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1313}
1314
1315/* Return 1 if the operand is either a non-special register or an item
1316   that can be used as the operand of a `mode' add insn.  */
1317
1318int
1319add_operand (op, mode)
1320    rtx op;
1321    enum machine_mode mode;
1322{
1323  if (GET_CODE (op) == CONST_INT)
1324    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1325	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1326
1327  return gpc_reg_operand (op, mode);
1328}
1329
1330/* Return 1 if OP is a constant but not a valid add_operand.  */
1331
1332int
1333non_add_cint_operand (op, mode)
1334     rtx op;
1335     enum machine_mode mode ATTRIBUTE_UNUSED;
1336{
1337  return (GET_CODE (op) == CONST_INT
1338	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1339	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1340}
1341
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a negative value in a wider mode implies
	 bits set in the (unrepresented) high word.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* The immediate must fit entirely within either the low 16 bits
     (ori/xori) or the high 16 bits (oris/xoris).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1380
1381/* Return 1 if C is a constant that is not a logical operand (as
1382   above), but could be split into one.  */
1383
1384int
1385non_logical_cint_operand (op, mode)
1386     rtx op;
1387     enum machine_mode mode;
1388{
1389  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1390	  && ! logical_operand (op, mode)
1391	  && reg_or_logical_cint_operand (op, mode));
1392}
1393
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  (After the conditional inversion
     above, this also catches the all-ones original.)  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1440
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      /* Same test, but the 64-bit value is split across two host
	 words.  HIGH is only read on hosts where HOST_WIDE_INT is
	 narrower than 64 bits; on 64-bit hosts it stays unused.  */
      HOST_WIDE_INT low, high, lsb;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* All transitions must then be in the high word.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition is in the low word; the high word must be all
	 ones for the mask to be contiguous.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1500
1501/* Return 1 if the operand is either a non-special register or a constant
1502   that can be used as the operand of a PowerPC64 logical AND insn.  */
1503
1504int
1505and64_operand (op, mode)
1506    rtx op;
1507    enum machine_mode mode;
1508{
1509  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1510    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1511
1512  return (logical_operand (op, mode) || mask64_operand (op, mode));
1513}
1514
1515/* Return 1 if the operand is either a non-special register or a
1516   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1517
1518int
1519and_operand (op, mode)
1520    rtx op;
1521    enum machine_mode mode;
1522{
1523  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1524    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1525
1526  return (logical_operand (op, mode) || mask_operand (op, mode));
1527}
1528
1529/* Return 1 if the operand is a general register or memory operand.  */
1530
1531int
1532reg_or_mem_operand (op, mode)
1533     rtx op;
1534     enum machine_mode mode;
1535{
1536  return (gpc_reg_operand (op, mode)
1537	  || memory_operand (op, mode)
1538	  || volatile_mem_operand (op, mode));
1539}
1540
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the underlying register.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa takes no pre-increment/decrement address, and any constant
     displacement must be a multiple of 4 (DS-form encoding).  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1563
1564/* Return 1 if the operand, used inside a MEM, is a valid first argument
1565   to CALL.  This is a SYMBOL_REF or a pseudo-register, which will be
1566   forced to lr.  */
1567
1568int
1569call_operand (op, mode)
1570     rtx op;
1571     enum machine_mode mode;
1572{
1573  if (mode != VOIDmode && GET_MODE (op) != mode)
1574    return 0;
1575
1576  return (GET_CODE (op) == SYMBOL_REF
1577	  || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1578}
1579
/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
   this file and the function is not weakly defined.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* SYMBOL_REF_FLAG marks symbols known local to this compilation;
     otherwise accept only the current function itself, provided it is
     not weak (a weak definition may be overridden at link time).  */
  return (GET_CODE (op) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (op)
	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
	          && ! DECL_WEAK (current_function_decl))));
}
1593
/* Return 1 if this operand is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Anything else is invalid as a move source.  */
  return 0;
}
1650
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Only SYMBOL_REF or (const (plus SYMBOL_REF CONST_INT)) forms
     can refer into the small data area.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols have their names prefixed with '@' -- presumably
     by the section-info encoding hook; verify against ENCODE_SECTION_INFO
     in the ELF configuration headers.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1699
/* Helper for constant_pool_expr_p and toc_relative_expr_p: walk OP
   recursively, setting *HAVE_SYM when a constant-pool symbol is seen
   and *HAVE_TOC when the TOC base label is seen.  Return 1 when the
   whole expression consists only of such references and integers.  */
static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves of a sum/difference must themselves qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc))
;
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
1738
1739int
1740constant_pool_expr_p (op)
1741    rtx op;
1742{
1743  int have_sym = 0;
1744  int have_toc = 0;
1745  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1746}
1747
1748int
1749toc_relative_expr_p (op)
1750    rtx op;
1751{
1752    int have_sym = 0;
1753    int have_toc = 0;
1754    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1755}
1756
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split into a high part added to the
     register and a 16-bit signed low part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If bit 15 of the low part is set, sign-extend it and bump the
	 high part to compensate.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so the sum
     is a valid indexed address (where the mode permits indexing).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without a TOC: materialize symbolic addresses with an
     explicit high/lo_sum pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Darwin without a TOC: likewise, using the Mach-O variants.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constant-pool references go through the TOC when available.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
1857
1858/* The convention appears to be to define this wherever it is used.
1859   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1860   is now used here.  */
1861#ifndef REG_MODE_OK_FOR_BASE_P
1862#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1863#endif
1864
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner reg+high sum into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split the displacement so the mem insn
     carries only a 16-bit signed offset.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool addresses become TOC references when possible.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
1987
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with an constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, when update forms are enabled.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit signed displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg), except for the multi-register modes described
     in the comment above.  */
  if (mode != TImode
      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2039
2040/* Try to output insns to set TARGET equal to the constant C if it can
2041   be done in less than N insns.  Do all computations in MODE.
2042   Returns the place where the output has been placed if it can be
2043   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */
2045
2046rtx
2047rs6000_emit_set_const (dest, mode, source, n)
2048     rtx dest, source;
2049     enum machine_mode mode;
2050     int n ATTRIBUTE_UNUSED;
2051{
2052  HOST_WIDE_INT c0, c1;
2053
2054  if (mode == QImode || mode == HImode || mode == SImode)
2055    {
2056      if (dest == NULL)
2057        dest = gen_reg_rtx (mode);
2058      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2059      return dest;
2060    }
2061
2062  if (GET_CODE (source) == CONST_INT)
2063    {
2064      c0 = INTVAL (source);
2065      c1 = -(c0 < 0);
2066    }
2067  else if (GET_CODE (source) == CONST_DOUBLE)
2068    {
2069#if HOST_BITS_PER_WIDE_INT >= 64
2070      c0 = CONST_DOUBLE_LOW (source);
2071      c1 = -(c0 < 0);
2072#else
2073      c0 = CONST_DOUBLE_LOW (source);
2074      c1 = CONST_DOUBLE_HIGH (source);
2075#endif
2076    }
2077  else
2078    abort ();
2079
2080  return rs6000_emit_set_long_const (dest, c0, c1);
2081}
2082
2083/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
2085   exponential run times encountered when looking for longer sequences
2086   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: set the two word-sized halves of DEST directly,
	 choosing the subwords according to endianness.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: decompose the value into four 16-bit pieces,
	 ud1 being the least significant and ud4 the most.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* The host can hold all 64 bits in C1; take the high half from
	 there rather than from C2.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: the value is a sign-extended 16-bit quantity; a single
	 immediate move suffices.  The (x ^ 0x8000) - 0x8000 form
	 sign-extends the 16-bit piece.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Case 2: a sign-extended 32-bit quantity; load the upper 16
	 bits shifted into place, then OR in the low 16 if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: a sign-extended 48-bit quantity; build the top 32 bits
	 (ud3:ud2), shift left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: build ud4:ud3 in the low half, shift left 32,
	     then OR in ud2 (shifted) and ud1.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2171
2172/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose high word is merely the sign
     extension of the low word should have been a CONST_INT.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If the DImode access would be slow but two SImode accesses are not,
     split the move into two word-sized SImode moves.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* When the destination is not a register, force the source into one
     (only allowed while we may still create new pseudos).  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!VECTOR_MODE_P (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Constants other than CONST_INT must come from the pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go in the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use the default pattern for an address of ELF small data.  */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small PIC: go through the GOT.  (Both mode tests together
	 require Pmode == SImode, i.e. a 32-bit target.)  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and no PIC: materialize the address with a high/low
	 pair of instructions.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant itself went into the TOC, rewrite
	     the MEM as a TOC-relative reference.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* Force TImode memory addresses into a base register; the
	 patterns need a plain register-indirect address.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2469
2470/* Initialize a variable CUM of type CUMULATIVE_ARGS
2471   for a call to a function whose data type is FNTYPE.
2472   For a library call, FNTYPE is 0.
2473
2474   For incoming args we set the number of arguments in the prototype large
2475   so we never return a PARALLEL.  */
2476
2477void
2478init_cumulative_args (cum, fntype, libname, incoming)
2479     CUMULATIVE_ARGS *cum;
2480     tree fntype;
2481     rtx libname ATTRIBUTE_UNUSED;
2482     int incoming;
2483{
2484  static CUMULATIVE_ARGS zero_cumulative;
2485
2486  *cum = zero_cumulative;
2487  cum->words = 0;
2488  cum->fregno = FP_ARG_MIN_REG;
2489  cum->vregno = ALTIVEC_ARG_MIN_REG;
2490  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2491  cum->call_cookie = CALL_NORMAL;
2492  cum->sysv_gregno = GP_ARG_MIN_REG;
2493
2494  if (incoming)
2495    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2496
2497  else if (cum->prototype)
2498    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2499			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2500			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2501
2502  else
2503    cum->nargs_prototype = 0;
2504
2505  cum->orig_nargs = cum->nargs_prototype;
2506
2507  /* Check for longcall's */
2508  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2509    cum->call_cookie = CALL_LONG;
2510
2511  if (TARGET_DEBUG_ARG)
2512    {
2513      fprintf (stderr, "\ninit_cumulative_args:");
2514      if (fntype)
2515	{
2516	  tree ret_type = TREE_TYPE (fntype);
2517	  fprintf (stderr, " ret code = %s,",
2518		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2519	}
2520
2521      if (cum->call_cookie & CALL_LONG)
2522	fprintf (stderr, " longcall,");
2523
2524      fprintf (stderr, " proto = %d, nargs = %d\n",
2525	       cum->prototype, cum->nargs_prototype);
2526    }
2527}
2528
2529/* If defined, a C expression which determines whether, and in which
2530   direction, to pad out an argument with extra space.  The value
2531   should be of type `enum direction': either `upward' to pad above
2532   the argument, `downward' to pad below, or `none' to inhibit
2533   padding.
2534
2535   For the AIX ABI structs are always stored left shifted in their
2536   argument slot.  */
2537
2538enum direction
2539function_arg_padding (mode, type)
2540     enum machine_mode mode;
2541     tree type;
2542{
2543  if (type != 0 && AGGREGATE_TYPE_P (type))
2544    return upward;
2545
2546  /* This is the default definition.  */
2547  return (! BYTES_BIG_ENDIAN
2548          ? upward
2549          : ((mode == BLKmode
2550              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2551                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2552              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2553             ? downward : upward));
2554}
2555
2556/* If defined, a C expression that gives the alignment boundary, in bits,
2557   of an argument with the specified mode and type.  If it is not defined,
2558   PARM_BOUNDARY is used for all arguments.
2559
2560   V.4 wants long longs to be double word aligned.  */
2561
2562int
2563function_arg_boundary (mode, type)
2564     enum machine_mode mode;
2565     tree type ATTRIBUTE_UNUSED;
2566{
2567  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2568    return 64;
2569  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2570    return 128;
2571  else
2572    return PARM_BOUNDARY;
2573}
2574
2575/* Update the data in CUM to advance over an argument
2576   of mode MODE and data type TYPE.
2577   (TYPE is null for libcalls where that information may not be available.)  */
2578
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  /* AltiVec vector args consume a vector register while prototyped
     args remain, otherwise stack words.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      /* V.4: FP args use FP registers while any remain; after that
	 they go on the stack (doubles doubleword-aligned there).  */
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: gregno keeps advancing even after the argument has
	     spilled to the stack; expand_builtin_saveregs relies on
	     that overshoot to see that spilling has begun.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX ABI: everything is accounted in stack words; ALIGN skips a
	 word when a doubleword-aligned arg starts at an odd word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2670
2671/* Determine where to put an argument to a function.
2672   Value is zero to push the argument on the stack,
2673   or a hard register in which to store the argument.
2674
2675   MODE is the argument's machine mode.
2676   TYPE is the data type of the argument (as a tree).
2677    This is null for libcalls where that information may
2678    not be available.
2679   CUM is a variable of type CUMULATIVE_ARGS which gives info about
2680    the preceding args and about the function being called.
2681   NAMED is nonzero if this argument is a named parameter
2682    (otherwise it is an extra parameter matching an ellipsis).
2683
2684   On RS/6000 the first eight words of non-FP are normally in registers
2685   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
2686   Under V.4, the first 8 FP args are in registers.
2687
2688   If this is floating-point and no prototype is specified, we use
2689   both an FP and integer register (or possibly FP reg and stack).  Library
2690   functions (when TYPE is zero) always have the proper types for args,
2691   so we can pass the FP value just in one register.  emit_library_function
2692   doesn't support PARALLEL anyway.  */
2693
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* fregno still at its minimum means no FP arg ever landed in
	     a register.  */
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* AltiVec vectors: a vector register if named and one remains,
     otherwise memory.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      /* V.4 hard-float scalars: FP register or memory.  */
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX ABI.  ALIGN skips a word for doubleword-aligned args
	 starting at an odd word; ALIGN_WORDS is where this arg
	 actually starts.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types go entirely on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: return a PARALLEL pairing the GP
	     register (or NULL when it is entirely past the GP area)
	     with the FP register.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2810
2811/* For an arg passed partly in registers and partly in memory,
2812   this is the number of registers used.
2813   For args passed entirely in registers or entirely in memory, zero.  */
2814
2815int
2816function_arg_partial_nregs (cum, mode, type, named)
2817     CUMULATIVE_ARGS *cum;
2818     enum machine_mode mode;
2819     tree type;
2820     int named ATTRIBUTE_UNUSED;
2821{
2822  if (DEFAULT_ABI == ABI_V4)
2823    return 0;
2824
2825  if (USE_FP_FOR_ARG_P (*cum, mode, type)
2826      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2827    {
2828      if (cum->nargs_prototype >= 0)
2829	return 0;
2830    }
2831
2832  if (cum->words < GP_ARG_NUM_REG
2833      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2834    {
2835      int ret = GP_ARG_NUM_REG - cum->words;
2836      if (ret && TARGET_DEBUG_ARG)
2837	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2838
2839      return ret;
2840    }
2841
2842  return 0;
2843}
2844
2845/* A C expression that indicates when an argument must be passed by
2846   reference.  If nonzero for an argument, a copy of that argument is
2847   made in memory and a pointer to the argument is passed instead of
2848   the argument itself.  The pointer is passed in whatever way is
2849   appropriate for passing a pointer to that type.
2850
2851   Under V.4, structures and unions are passed by reference.  */
2852
2853int
2854function_arg_pass_by_reference (cum, mode, type, named)
2855     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2856     enum machine_mode mode ATTRIBUTE_UNUSED;
2857     tree type;
2858     int named ATTRIBUTE_UNUSED;
2859{
2860  if (DEFAULT_ABI == ABI_V4
2861      && ((type && AGGREGATE_TYPE_P (type))
2862	  || mode == TFmode))
2863    {
2864      if (TARGET_DEBUG_ARG)
2865	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2866
2867      return 1;
2868    }
2869
2870  return 0;
2871}
2872
2873/* Perform any needed actions needed for a function that is receiving a
2874   variable number of arguments.
2875
2876   CUM is as above.
2877
2878   MODE and TYPE are the mode and type of the current parameter.
2879
2880   PRETEND_SIZE is a variable that should be set to the amount of stack
2881   that must be pushed by the prolog to pretend that our caller pushed
2882   it.
2883
2884   Normally, this macro will push all remaining incoming registers on the
2885   stack and set PRETEND_SIZE to the length of the registers pushed.  */
2886
2887void
2888setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2889     CUMULATIVE_ARGS *cum;
2890     enum machine_mode mode;
2891     tree type;
2892     int *pretend_size;
2893     int no_rtl;
2894
2895{
2896  CUMULATIVE_ARGS next_cum;
2897  int reg_size = TARGET_32BIT ? 4 : 8;
2898  rtx save_area = NULL_RTX, mem;
2899  int first_reg_offset, set;
2900  tree fntype;
2901  int stdarg_p;
2902
2903  fntype = TREE_TYPE (current_function_decl);
2904  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2905	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2906		  != void_type_node));
2907
2908  /* For varargs, we do not want to skip the dummy va_dcl argument.
2909     For stdargs, we do want to skip the last named argument.  */
2910  next_cum = *cum;
2911  if (stdarg_p)
2912    function_arg_advance (&next_cum, mode, type, 1);
2913
2914  if (DEFAULT_ABI == ABI_V4)
2915    {
2916      /* Indicate to allocate space on the stack for varargs save area.  */
2917      /* ??? Does this really have to be located at a magic spot on the
2918	 stack, or can we allocate this with assign_stack_local instead.  */
2919      cfun->machine->sysv_varargs_p = 1;
2920      if (! no_rtl)
2921	save_area = plus_constant (virtual_stack_vars_rtx,
2922				   - RS6000_VARARGS_SIZE);
2923
2924      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2925    }
2926  else
2927    {
2928      first_reg_offset = next_cum.words;
2929      save_area = virtual_incoming_args_rtx;
2930      cfun->machine->sysv_varargs_p = 0;
2931
2932      if (MUST_PASS_IN_STACK (mode, type))
2933	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2934    }
2935
2936  set = get_varargs_alias_set ();
2937  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2938    {
2939      mem = gen_rtx_MEM (BLKmode,
2940		         plus_constant (save_area,
2941					first_reg_offset * reg_size)),
2942      set_mem_alias_set (mem, set);
2943      set_mem_align (mem, BITS_PER_WORD);
2944
2945      move_block_from_reg
2946	(GP_ARG_MIN_REG + first_reg_offset, mem,
2947	 GP_ARG_NUM_REG - first_reg_offset,
2948	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2949
2950      /* ??? Does ABI_V4 need this at all?  */
2951      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2952    }
2953
2954  /* Save FP registers if needed.  */
2955  if (DEFAULT_ABI == ABI_V4
2956      && TARGET_HARD_FLOAT && ! no_rtl
2957      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2958    {
2959      int fregno = next_cum.fregno;
2960      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2961      rtx lab = gen_label_rtx ();
2962      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2963
2964      emit_jump_insn (gen_rtx_SET (VOIDmode,
2965				   pc_rtx,
2966				   gen_rtx_IF_THEN_ELSE (VOIDmode,
2967					    gen_rtx_NE (VOIDmode, cr1,
2968						        const0_rtx),
2969					    gen_rtx_LABEL_REF (VOIDmode, lab),
2970					    pc_rtx)));
2971
2972      while (fregno <= FP_ARG_V4_MAX_REG)
2973	{
2974	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2975          set_mem_alias_set (mem, set);
2976	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2977	  fregno++;
2978	  off += 8;
2979	}
2980
2981      emit_label (lab);
2982    }
2983}
2984
2985/* Create the va_list data type.  */
2986
/* Build the type used for __builtin_va_list.  Returns a 'char *' for
   AIX-style ABIs, or a one-element array of a four-field record for
   the SVR4 (V.4) ABI.  */
tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* SVR4: build the __va_list_tag record with fields:
       gpr  -- count of GP argument registers already consumed,
       fpr  -- count of FP argument registers already consumed,
       overflow_arg_area -- pointer into the caller's stack arguments,
       reg_save_area     -- pointer to the register save block laid
			    down by setup_incoming_varargs.  */
  record = make_lang_type (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields onto the record and lay it out.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3026
3027/* Implement va_start.  */
3028
/* Expand __builtin_va_start for the V.4 ABI: initialize the four
   fields of the va_list record from the current function's incoming
   argument state.  Other ABIs use the generic expander.  */
void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* Pick apart the four fields of the record built by
     rs6000_build_va_list (same order: gpr, fpr, ovf, sav).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is a pointer to the array type; dereference it and build
     COMPONENT_REFs for each field.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* gpr = number of GP registers consumed by named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* fpr = number of FP registers consumed by named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area (laid out below the frame pointer by
     setup_incoming_varargs).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3098
3099/* Implement va_arg.  */
3100
/* Expand __builtin_va_arg for the V.4 ABI: fetch the next argument of
   TYPE from either the register save area or the stack overflow area,
   returning an rtx holding its address.  Other ABIs use the generic
   expander.  */
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Pick apart the va_list record fields (see rs6000_build_va_list).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register file it lives in, how many
     registers it occupies (n_reg), and where its save slot starts
     (sav_ofs + reg-count * sav_scale within the save area).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  FP slots follow the
	 eight 4-byte GP slots and are 8 bytes each.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /*  Vectors never go in registers.  */
  if (TREE_CODE (type) != VECTOR_TYPE)
    {
      /* If the register counter has reached 8, all argument registers
	 are exhausted: fall through to the overflow-area path.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  Round the counter up
	 to the next even register pair.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + reg * sav_scale, post-incrementing the
	 register counter by the number of registers consumed.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* Vectors are 16 byte aligned.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: t = (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance ovf past the argument just fetched.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* Pass-by-reference arguments: what we computed is the address of a
     pointer; load through it to get the argument's real address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3268
3269/* Builtins.  */
3270
/* Register target builtin NAME with type TYPE and code CODE, but only
   when all MASK bits are enabled in target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
do {									\
  if ((MASK) & target_flags)						\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);	\
} while (0)

/* One table entry per builtin: the target_flags mask that enables it,
   the insn pattern used to expand it, its source-level name, and its
   rs6000_builtins enumeration code.  */
struct builtin_description
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
3284
3285/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3286
static const struct builtin_description bdesc_3arg[] =
{
  /* Each row: enabling mask, expansion pattern, builtin name, builtin
     code.  All entries here are three-operand AltiVec builtins.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3313
3314/* DST operations: void foo (void *, const int, const char).  */
3315
static const struct builtin_description bdesc_dst[] =
{
  /* AltiVec data-stream touch builtins (dst/dstt/dstst/dststt).  */
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3323
3324/* Simple binary operations: VECc = foo (VECa, VECb).  */
3325
3326static const struct builtin_description bdesc_2arg[] =
3327{
3328  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3329  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3330  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3331  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3332  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3333  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3334  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3335  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3336  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3337  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3338  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3339  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3340  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3341  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3342  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3343  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3344  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3345  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3346  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3347  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3348  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3349  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3350  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3351  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3352  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3353  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3354  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3355  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3356  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3357  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3358  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3359  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3360  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3361  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3362  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3363  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3364  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3365  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3366  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3367  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3368  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3369  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3370  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3371  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3372  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3373  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3374  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3375  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3376  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3377  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3378  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3379  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3380  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3381  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3382  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3383  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3384  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3385  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3386  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3387  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3388  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3389  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3390  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3391  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3392  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3393  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3394  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3395  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3396  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3397  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3398  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3399  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3400  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3401  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3402  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3403  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3404  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3405  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3406  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3407  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3408  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3409  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3410  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3411  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3412  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3413  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3414  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3415  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3416  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3417  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3418  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3419  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3420  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3421  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3422  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3423  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3424  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3425  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3426  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3427  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3428  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3429  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3430  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3431  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3432  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3433  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3434  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3435  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3436  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3437  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3438  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3439  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3440  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3441};
3442
3443/* AltiVec predicates.  */
3444
/* Like builtin_description, but with an extra opcode string for the
   AltiVec predicate (set-CR6) builtins.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* target_flags bits enabling the builtin.  */
  const enum insn_code icode;		/* insn pattern used to expand it.  */
  const char *opcode;			/* Predicate insn name, e.g. "*vcmpbfp." --
					   presumably resolved to a pattern by the
					   predicate expander; confirm there.  */
  const char *const name;		/* source-level builtin name.  */
  const enum rs6000_builtins code;	/* builtin enumeration code.  */
};
3453
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  /* AltiVec comparison predicates (the dot-form compares that set
     CR6), grouped by operand mode: V4SF, V4SI, V8HI, V16QI.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
3470
/* ABS* operations.  */
3472
static const struct builtin_description bdesc_abs[] =
{
  /* Plain abs (absv*2) and saturating abs (abss_v*) builtins.  */
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
3483
3484/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3485   foo (VECa).  */
3486
static const struct builtin_description bdesc_1arg[] =
{
  /* One-operand AltiVec builtins; the operand is either a vector or,
     for the vspltis* entries, a signed literal.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3507
3508static rtx
3509altivec_expand_unop_builtin (icode, arglist, target)
3510     enum insn_code icode;
3511     tree arglist;
3512     rtx target;
3513{
3514  rtx pat;
3515  tree arg0 = TREE_VALUE (arglist);
3516  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3517  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3518  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3519
3520  /* If we got invalid arguments bail out before generating bad rtl.  */
3521  if (arg0 == error_mark_node)
3522    return NULL_RTX;
3523
3524  if (target == 0
3525      || GET_MODE (target) != tmode
3526      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3527    target = gen_reg_rtx (tmode);
3528
3529  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3530    op0 = copy_to_mode_reg (mode0, op0);
3531
3532  pat = GEN_FCN (icode) (target, op0);
3533  if (! pat)
3534    return 0;
3535  emit_insn (pat);
3536
3537  return target;
3538}
3539
3540static rtx
3541altivec_expand_abs_builtin (icode, arglist, target)
3542     enum insn_code icode;
3543     tree arglist;
3544     rtx target;
3545{
3546  rtx pat, scratch1, scratch2;
3547  tree arg0 = TREE_VALUE (arglist);
3548  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3549  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3550  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3551
3552  /* If we have invalid arguments, bail out before generating bad rtl.  */
3553  if (arg0 == error_mark_node)
3554    return NULL_RTX;
3555
3556  if (target == 0
3557      || GET_MODE (target) != tmode
3558      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3559    target = gen_reg_rtx (tmode);
3560
3561  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3562    op0 = copy_to_mode_reg (mode0, op0);
3563
3564  scratch1 = gen_reg_rtx (mode0);
3565  scratch2 = gen_reg_rtx (mode0);
3566
3567  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3568  if (! pat)
3569    return 0;
3570  emit_insn (pat);
3571
3572  return target;
3573}
3574
3575static rtx
3576altivec_expand_binop_builtin (icode, arglist, target)
3577     enum insn_code icode;
3578     tree arglist;
3579     rtx target;
3580{
3581  rtx pat;
3582  tree arg0 = TREE_VALUE (arglist);
3583  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3584  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3585  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3586  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3587  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3588  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3589
3590  /* If we got invalid arguments bail out before generating bad rtl.  */
3591  if (arg0 == error_mark_node || arg1 == error_mark_node)
3592    return NULL_RTX;
3593
3594  if (target == 0
3595      || GET_MODE (target) != tmode
3596      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3597    target = gen_reg_rtx (tmode);
3598
3599  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3600    op0 = copy_to_mode_reg (mode0, op0);
3601  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3602    op1 = copy_to_mode_reg (mode1, op1);
3603
3604  pat = GEN_FCN (icode) (target, op0, op1);
3605  if (! pat)
3606    return 0;
3607  emit_insn (pat);
3608
3609  return target;
3610}
3611
3612static rtx
3613altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3614     enum insn_code icode;
3615     const char *opcode;
3616     tree arglist;
3617     rtx target;
3618{
3619  rtx pat, scratch;
3620  tree cr6_form = TREE_VALUE (arglist);
3621  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3622  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3623  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3624  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3625  enum machine_mode tmode = SImode;
3626  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3627  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3628  int cr6_form_int;
3629
3630  if (TREE_CODE (cr6_form) != INTEGER_CST)
3631    {
3632      error ("argument 1 of __builtin_altivec_predicate must be a constant");
3633      return NULL_RTX;
3634    }
3635  else
3636    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3637
3638  if (mode0 != mode1)
3639    abort ();
3640
3641  /* If we have invalid arguments, bail out before generating bad rtl.  */
3642  if (arg0 == error_mark_node || arg1 == error_mark_node)
3643    return NULL_RTX;
3644
3645  if (target == 0
3646      || GET_MODE (target) != tmode
3647      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3648    target = gen_reg_rtx (tmode);
3649
3650  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3651    op0 = copy_to_mode_reg (mode0, op0);
3652  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3653    op1 = copy_to_mode_reg (mode1, op1);
3654
3655  scratch = gen_reg_rtx (mode0);
3656
3657  pat = GEN_FCN (icode) (scratch, op0, op1,
3658			 gen_rtx (SYMBOL_REF, Pmode, opcode));
3659  if (! pat)
3660    return 0;
3661  emit_insn (pat);
3662
3663  /* The vec_any* and vec_all* predicates use the same opcodes for two
3664     different operations, but the bits in CR6 will be different
3665     depending on what information we want.  So we have to play tricks
3666     with CR6 to get the right bits out.
3667
3668     If you think this is disgusting, look at the specs for the
3669     AltiVec predicates.  */
3670
3671     switch (cr6_form_int)
3672       {
3673       case 0:
3674	 emit_insn (gen_cr6_test_for_zero (target));
3675	 break;
3676       case 1:
3677	 emit_insn (gen_cr6_test_for_zero_reverse (target));
3678	 break;
3679       case 2:
3680	 emit_insn (gen_cr6_test_for_lt (target));
3681	 break;
3682       case 3:
3683	 emit_insn (gen_cr6_test_for_lt_reverse (target));
3684	 break;
3685       default:
3686	 error ("argument 1 of __builtin_altivec_predicate is out of range");
3687	 break;
3688       }
3689
3690  return target;
3691}
3692
3693static rtx
3694altivec_expand_stv_builtin (icode, arglist)
3695     enum insn_code icode;
3696     tree arglist;
3697{
3698  tree arg0 = TREE_VALUE (arglist);
3699  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3700  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3701  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3702  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3703  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3704  rtx pat;
3705  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3706  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3707  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3708
3709  /* Invalid arguments.  Bail before doing anything stoopid!  */
3710  if (arg0 == error_mark_node
3711      || arg1 == error_mark_node
3712      || arg2 == error_mark_node)
3713    return NULL_RTX;
3714
3715  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3716    op0 = copy_to_mode_reg (mode2, op0);
3717  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3718    op1 = copy_to_mode_reg (mode0, op1);
3719  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3720    op2 = copy_to_mode_reg (mode1, op2);
3721
3722  pat = GEN_FCN (icode) (op1, op2, op0);
3723  if (pat)
3724    emit_insn (pat);
3725  return NULL_RTX;
3726}
3727
3728static rtx
3729altivec_expand_ternop_builtin (icode, arglist, target)
3730     enum insn_code icode;
3731     tree arglist;
3732     rtx target;
3733{
3734  rtx pat;
3735  tree arg0 = TREE_VALUE (arglist);
3736  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3737  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3738  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3739  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3740  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3741  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3742  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3743  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3744  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3745
3746  /* If we got invalid arguments bail out before generating bad rtl.  */
3747  if (arg0 == error_mark_node
3748      || arg1 == error_mark_node
3749      || arg2 == error_mark_node)
3750    return NULL_RTX;
3751
3752  if (target == 0
3753      || GET_MODE (target) != tmode
3754      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3755    target = gen_reg_rtx (tmode);
3756
3757  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3758    op0 = copy_to_mode_reg (mode0, op0);
3759  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3760    op1 = copy_to_mode_reg (mode1, op1);
3761  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3762    op2 = copy_to_mode_reg (mode2, op2);
3763
3764  pat = GEN_FCN (icode) (target, op0, op1, op2);
3765  if (! pat)
3766    return 0;
3767  emit_insn (pat);
3768
3769  return target;
3770}
/* Expand an AltiVec builtin call EXP, putting the result in TARGET when
   convenient.  Special-case builtins (internal loads/stores, VSCR
   moves, data-stream insns) are handled in the switch below; everything
   else is dispatched through the bdesc_* tables to the generic unop /
   binop / ternop / abs / predicate expanders.  Returns the result rtx,
   NULL_RTX for void builtins or erroneous input, 0 on pattern failure,
   and aborts on an unrecognized builtin code.  */

static rtx
altivec_expand_builtin (exp, target)
     tree exp;
     rtx target;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, pat;
  enum machine_mode tmode, mode0, mode1, mode2;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Internal loads: the argument is a pointer; wrap it in a MEM of
       the insn's operand mode if the predicate rejects it directly.  */
    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
      icode = CODE_FOR_altivec_lvx_16qi;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
      icode = CODE_FOR_altivec_lvx_8hi;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
      icode = CODE_FOR_altivec_lvx_4si;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
      icode = CODE_FOR_altivec_lvx_4sf;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Internal stores: arg0 is the destination pointer, arg1 the
       vector value; no result, so these return NULL_RTX.  */
    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
      icode = CODE_FOR_altivec_stvx_16qi;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);

      pat = GEN_FCN (icode) (op0, op1);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
      icode = CODE_FOR_altivec_stvx_8hi;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);

      pat = GEN_FCN (icode) (op0, op1);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
      icode = CODE_FOR_altivec_stvx_4si;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);

      pat = GEN_FCN (icode) (op0, op1);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
      icode = CODE_FOR_altivec_stvx_4sf;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);

      pat = GEN_FCN (icode) (op0, op1);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    /* User-visible stores share the three-operand stv expander.  */
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    /* Move from VSCR: no arguments, vector result.  */
    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Move to VSCR: one vector argument, no result.  */
    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return NULL_RTX;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    /* Data-stream-stop-all: no operands.  */
    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    /* Data-stream-stop: one stream-id argument.  */
    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return NULL_RTX;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Handle DST variants.  These take (pointer, stride, 2-bit literal);
     the literal selects the data-stream channel and must be 0-3.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return NULL_RTX;

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (mode0, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
	  {
	    error ("argument 3 of `%s' must be a 2-bit literal", d->name);
	    return NULL_RTX;
	  }

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	return NULL_RTX;
      }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Handle simple unary operations.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
    if (d->code == fcode)
      return altivec_expand_unop_builtin (d->icode, arglist, target);

  /* Handle simple binary operations.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
    if (d->code == fcode)
      return altivec_expand_binop_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
					   arglist, target);
    default:
      break;
      /* Fall through to the ternary handling below.  */
    }

  /* Handle simple ternary operations.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < sizeof  (bdesc_3arg) / sizeof *d; i++, d++)
    if (d->code == fcode)
      return altivec_expand_ternop_builtin (d->icode, arglist, target);

  /* An unrecognized builtin code is a front-end/back-end mismatch.  */
  abort ();
  return NULL_RTX;
}
4115
4116/* Expand an expression EXP that calls a built-in function,
4117   with result going to TARGET if that's convenient
4118   (and in mode MODE if that's convenient).
4119   SUBTARGET may be used as the target for computing one of EXP's operands.
4120   IGNORE is nonzero if the value is to be ignored.  */
4121
4122static rtx
4123rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4124     tree exp;
4125     rtx target;
4126     rtx subtarget ATTRIBUTE_UNUSED;
4127     enum machine_mode mode ATTRIBUTE_UNUSED;
4128     int ignore ATTRIBUTE_UNUSED;
4129{
4130  if (TARGET_ALTIVEC)
4131    return altivec_expand_builtin (exp, target);
4132
4133  abort ();
4134}
4135
4136static void
4137rs6000_init_builtins ()
4138{
4139  if (TARGET_ALTIVEC)
4140    altivec_init_builtins ();
4141}
4142
4143static void
4144altivec_init_builtins (void)
4145{
4146  struct builtin_description *d;
4147  struct builtin_description_predicates *dp;
4148  size_t i;
4149
4150  tree endlink = void_list_node;
4151
4152  tree pint_type_node = build_pointer_type (integer_type_node);
4153  tree pvoid_type_node = build_pointer_type (void_type_node);
4154  tree pshort_type_node = build_pointer_type (short_integer_type_node);
4155  tree pchar_type_node = build_pointer_type (char_type_node);
4156  tree pfloat_type_node = build_pointer_type (float_type_node);
4157
4158  tree v4sf_ftype_v4sf_v4sf_v16qi
4159    = build_function_type (V4SF_type_node,
4160			   tree_cons (NULL_TREE, V4SF_type_node,
4161				      tree_cons (NULL_TREE, V4SF_type_node,
4162						 tree_cons (NULL_TREE,
4163							    V16QI_type_node,
4164							    endlink))));
4165  tree v4si_ftype_v4si_v4si_v16qi
4166    = build_function_type (V4SI_type_node,
4167			   tree_cons (NULL_TREE, V4SI_type_node,
4168				      tree_cons (NULL_TREE, V4SI_type_node,
4169						 tree_cons (NULL_TREE,
4170							    V16QI_type_node,
4171							    endlink))));
4172  tree v8hi_ftype_v8hi_v8hi_v16qi
4173    = build_function_type (V8HI_type_node,
4174			   tree_cons (NULL_TREE, V8HI_type_node,
4175				      tree_cons (NULL_TREE, V8HI_type_node,
4176						 tree_cons (NULL_TREE,
4177							    V16QI_type_node,
4178							    endlink))));
4179  tree v16qi_ftype_v16qi_v16qi_v16qi
4180    = build_function_type (V16QI_type_node,
4181			   tree_cons (NULL_TREE, V16QI_type_node,
4182				      tree_cons (NULL_TREE, V16QI_type_node,
4183						 tree_cons (NULL_TREE,
4184							    V16QI_type_node,
4185							    endlink))));
4186
4187  /* V4SI foo (char).  */
4188  tree v4si_ftype_char
4189    = build_function_type (V4SI_type_node,
4190		           tree_cons (NULL_TREE, char_type_node, endlink));
4191
4192  /* V8HI foo (char).  */
4193  tree v8hi_ftype_char
4194    = build_function_type (V8HI_type_node,
4195		           tree_cons (NULL_TREE, char_type_node, endlink));
4196
4197  /* V16QI foo (char).  */
4198  tree v16qi_ftype_char
4199    = build_function_type (V16QI_type_node,
4200		           tree_cons (NULL_TREE, char_type_node, endlink));
4201  /* V4SF foo (V4SF).  */
4202  tree v4sf_ftype_v4sf
4203    = build_function_type (V4SF_type_node,
4204			   tree_cons (NULL_TREE, V4SF_type_node, endlink));
4205
4206  /* V4SI foo (int *).  */
4207  tree v4si_ftype_pint
4208    = build_function_type (V4SI_type_node,
4209			   tree_cons (NULL_TREE, pint_type_node, endlink));
4210  /* V8HI foo (short *).  */
4211  tree v8hi_ftype_pshort
4212    = build_function_type (V8HI_type_node,
4213			   tree_cons (NULL_TREE, pshort_type_node, endlink));
4214  /* V16QI foo (char *).  */
4215  tree v16qi_ftype_pchar
4216    = build_function_type (V16QI_type_node,
4217			   tree_cons (NULL_TREE, pchar_type_node, endlink));
4218  /* V4SF foo (float *).  */
4219  tree v4sf_ftype_pfloat
4220    = build_function_type (V4SF_type_node,
4221			   tree_cons (NULL_TREE, pfloat_type_node, endlink));
4222
4223  /* V8HI foo (V16QI).  */
4224  tree v8hi_ftype_v16qi
4225    = build_function_type (V8HI_type_node,
4226			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4227
4228  /* void foo (void *, int, char/literal).  */
4229  tree void_ftype_pvoid_int_char
4230    = build_function_type (void_type_node,
4231			   tree_cons (NULL_TREE, pvoid_type_node,
4232				      tree_cons (NULL_TREE, integer_type_node,
4233						 tree_cons (NULL_TREE,
4234							    char_type_node,
4235							    endlink))));
4236
4237  /* void foo (int *, V4SI).  */
4238  tree void_ftype_pint_v4si
4239    = build_function_type (void_type_node,
4240			   tree_cons (NULL_TREE, pint_type_node,
4241				      tree_cons (NULL_TREE, V4SI_type_node,
4242						 endlink)));
4243  /* void foo (short *, V8HI).  */
4244  tree void_ftype_pshort_v8hi
4245    = build_function_type (void_type_node,
4246			   tree_cons (NULL_TREE, pshort_type_node,
4247				      tree_cons (NULL_TREE, V8HI_type_node,
4248						 endlink)));
4249  /* void foo (char *, V16QI).  */
4250  tree void_ftype_pchar_v16qi
4251    = build_function_type (void_type_node,
4252			   tree_cons (NULL_TREE, pchar_type_node,
4253				      tree_cons (NULL_TREE, V16QI_type_node,
4254						 endlink)));
4255  /* void foo (float *, V4SF).  */
4256  tree void_ftype_pfloat_v4sf
4257    = build_function_type (void_type_node,
4258			   tree_cons (NULL_TREE, pfloat_type_node,
4259				      tree_cons (NULL_TREE, V4SF_type_node,
4260						 endlink)));
4261
4262  /* void foo (V4SI).  */
4263  tree void_ftype_v4si
4264    = build_function_type (void_type_node,
4265			   tree_cons (NULL_TREE, V4SI_type_node,
4266				      endlink));
4267
4268  /* void foo (vint, int, void *).  */
4269  tree void_ftype_v4si_int_pvoid
4270    = build_function_type (void_type_node,
4271			   tree_cons (NULL_TREE, V4SI_type_node,
4272				      tree_cons (NULL_TREE, integer_type_node,
4273						 tree_cons (NULL_TREE,
4274							    pvoid_type_node,
4275							    endlink))));
4276
4277  /* void foo (vchar, int, void *).  */
4278  tree void_ftype_v16qi_int_pvoid
4279    = build_function_type (void_type_node,
4280			   tree_cons (NULL_TREE, V16QI_type_node,
4281				      tree_cons (NULL_TREE, integer_type_node,
4282						 tree_cons (NULL_TREE,
4283							    pvoid_type_node,
4284							    endlink))));
4285
4286  /* void foo (vshort, int, void *).  */
4287  tree void_ftype_v8hi_int_pvoid
4288    = build_function_type (void_type_node,
4289			   tree_cons (NULL_TREE, V8HI_type_node,
4290				      tree_cons (NULL_TREE, integer_type_node,
4291						 tree_cons (NULL_TREE,
4292							    pvoid_type_node,
4293							    endlink))));
4294
4295  /* void foo (char).  */
4296  tree void_ftype_qi
4297    = build_function_type (void_type_node,
4298			   tree_cons (NULL_TREE, char_type_node,
4299				      endlink));
4300
4301  /* void foo (void).  */
4302  tree void_ftype_void
4303    = build_function_type (void_type_node, void_list_node);
4304
4305  /* vshort foo (void).  */
4306  tree v8hi_ftype_void
4307    = build_function_type (V8HI_type_node, void_list_node);
4308
4309  tree v4si_ftype_v4si_v4si
4310    = build_function_type (V4SI_type_node,
4311			   tree_cons (NULL_TREE, V4SI_type_node,
4312				      tree_cons (NULL_TREE, V4SI_type_node,
4313						 endlink)));
4314
4315  /* These are for the unsigned 5 bit literals.  */
4316
4317  tree v4sf_ftype_v4si_char
4318    = build_function_type (V4SF_type_node,
4319			   tree_cons (NULL_TREE, V4SI_type_node,
4320				      tree_cons (NULL_TREE, char_type_node,
4321						 endlink)));
4322  tree v4si_ftype_v4sf_char
4323    = build_function_type (V4SI_type_node,
4324			   tree_cons (NULL_TREE, V4SF_type_node,
4325				      tree_cons (NULL_TREE, char_type_node,
4326						 endlink)));
4327  tree v4si_ftype_v4si_char
4328    = build_function_type (V4SI_type_node,
4329			   tree_cons (NULL_TREE, V4SI_type_node,
4330				      tree_cons (NULL_TREE, char_type_node,
4331						 endlink)));
4332  tree v8hi_ftype_v8hi_char
4333    = build_function_type (V8HI_type_node,
4334			   tree_cons (NULL_TREE, V8HI_type_node,
4335				      tree_cons (NULL_TREE, char_type_node,
4336						 endlink)));
4337  tree v16qi_ftype_v16qi_char
4338    = build_function_type (V16QI_type_node,
4339			   tree_cons (NULL_TREE, V16QI_type_node,
4340				      tree_cons (NULL_TREE, char_type_node,
4341						 endlink)));
4342
4343  /* These are for the unsigned 4 bit literals.  */
4344
4345  tree v16qi_ftype_v16qi_v16qi_char
4346    = build_function_type (V16QI_type_node,
4347			   tree_cons (NULL_TREE, V16QI_type_node,
4348				      tree_cons (NULL_TREE, V16QI_type_node,
4349						 tree_cons (NULL_TREE,
4350							    char_type_node,
4351							    endlink))));
4352
4353  tree v8hi_ftype_v8hi_v8hi_char
4354    = build_function_type (V8HI_type_node,
4355			   tree_cons (NULL_TREE, V8HI_type_node,
4356				      tree_cons (NULL_TREE, V8HI_type_node,
4357						 tree_cons (NULL_TREE,
4358							    char_type_node,
4359							    endlink))));
4360
4361  tree v4si_ftype_v4si_v4si_char
4362    = build_function_type (V4SI_type_node,
4363			   tree_cons (NULL_TREE, V4SI_type_node,
4364				      tree_cons (NULL_TREE, V4SI_type_node,
4365						 tree_cons (NULL_TREE,
4366							    char_type_node,
4367							    endlink))));
4368
4369  tree v4sf_ftype_v4sf_v4sf_char
4370    = build_function_type (V4SF_type_node,
4371			   tree_cons (NULL_TREE, V4SF_type_node,
4372				      tree_cons (NULL_TREE, V4SF_type_node,
4373						 tree_cons (NULL_TREE,
4374							    char_type_node,
4375							    endlink))));
4376
4377  /* End of 4 bit literals.  */
4378
4379  tree v4sf_ftype_v4sf_v4sf
4380    = build_function_type (V4SF_type_node,
4381			   tree_cons (NULL_TREE, V4SF_type_node,
4382				      tree_cons (NULL_TREE, V4SF_type_node,
4383						 endlink)));
4384  tree v4sf_ftype_v4sf_v4sf_v4si
4385    = build_function_type (V4SF_type_node,
4386			   tree_cons (NULL_TREE, V4SF_type_node,
4387				      tree_cons (NULL_TREE, V4SF_type_node,
4388						 tree_cons (NULL_TREE,
4389							    V4SI_type_node,
4390							    endlink))));
4391  tree v4sf_ftype_v4sf_v4sf_v4sf
4392    = build_function_type (V4SF_type_node,
4393			   tree_cons (NULL_TREE, V4SF_type_node,
4394				      tree_cons (NULL_TREE, V4SF_type_node,
4395						 tree_cons (NULL_TREE,
4396							    V4SF_type_node,
4397							    endlink))));
4398  tree v4si_ftype_v4si_v4si_v4si
4399    = build_function_type (V4SI_type_node,
4400			   tree_cons (NULL_TREE, V4SI_type_node,
4401				      tree_cons (NULL_TREE, V4SI_type_node,
4402						 tree_cons (NULL_TREE,
4403							    V4SI_type_node,
4404							    endlink))));
4405
4406  tree v8hi_ftype_v8hi_v8hi
4407    = build_function_type (V8HI_type_node,
4408			   tree_cons (NULL_TREE, V8HI_type_node,
4409				      tree_cons (NULL_TREE, V8HI_type_node,
4410						 endlink)));
4411  tree v8hi_ftype_v8hi_v8hi_v8hi
4412    = build_function_type (V8HI_type_node,
4413			   tree_cons (NULL_TREE, V8HI_type_node,
4414				      tree_cons (NULL_TREE, V8HI_type_node,
4415						 tree_cons (NULL_TREE,
4416							    V8HI_type_node,
4417							    endlink))));
4418 tree v4si_ftype_v8hi_v8hi_v4si
4419    = build_function_type (V4SI_type_node,
4420			   tree_cons (NULL_TREE, V8HI_type_node,
4421				      tree_cons (NULL_TREE, V8HI_type_node,
4422						 tree_cons (NULL_TREE,
4423							    V4SI_type_node,
4424							    endlink))));
4425 tree v4si_ftype_v16qi_v16qi_v4si
4426    = build_function_type (V4SI_type_node,
4427			   tree_cons (NULL_TREE, V16QI_type_node,
4428				      tree_cons (NULL_TREE, V16QI_type_node,
4429						 tree_cons (NULL_TREE,
4430							    V4SI_type_node,
4431							    endlink))));
4432
4433  tree v16qi_ftype_v16qi_v16qi
4434    = build_function_type (V16QI_type_node,
4435			   tree_cons (NULL_TREE, V16QI_type_node,
4436				      tree_cons (NULL_TREE, V16QI_type_node,
4437						 endlink)));
4438
4439  tree v4si_ftype_v4sf_v4sf
4440    = build_function_type (V4SI_type_node,
4441			   tree_cons (NULL_TREE, V4SF_type_node,
4442				      tree_cons (NULL_TREE, V4SF_type_node,
4443						 endlink)));
4444
4445  tree v4si_ftype_v4si
4446    = build_function_type (V4SI_type_node,
4447			   tree_cons (NULL_TREE, V4SI_type_node, endlink));
4448
4449  tree v8hi_ftype_v8hi
4450    = build_function_type (V8HI_type_node,
4451			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4452
4453  tree v16qi_ftype_v16qi
4454    = build_function_type (V16QI_type_node,
4455			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4456
4457  tree v8hi_ftype_v16qi_v16qi
4458    = build_function_type (V8HI_type_node,
4459			   tree_cons (NULL_TREE, V16QI_type_node,
4460				      tree_cons (NULL_TREE, V16QI_type_node,
4461						 endlink)));
4462
4463  tree v4si_ftype_v8hi_v8hi
4464    = build_function_type (V4SI_type_node,
4465			   tree_cons (NULL_TREE, V8HI_type_node,
4466				      tree_cons (NULL_TREE, V8HI_type_node,
4467						 endlink)));
4468
4469  tree v8hi_ftype_v4si_v4si
4470    = build_function_type (V8HI_type_node,
4471			   tree_cons (NULL_TREE, V4SI_type_node,
4472				      tree_cons (NULL_TREE, V4SI_type_node,
4473						 endlink)));
4474
4475  tree v16qi_ftype_v8hi_v8hi
4476    = build_function_type (V16QI_type_node,
4477			   tree_cons (NULL_TREE, V8HI_type_node,
4478				      tree_cons (NULL_TREE, V8HI_type_node,
4479						 endlink)));
4480
4481  tree v4si_ftype_v16qi_v4si
4482    = build_function_type (V4SI_type_node,
4483			   tree_cons (NULL_TREE, V16QI_type_node,
4484				      tree_cons (NULL_TREE, V4SI_type_node,
4485						 endlink)));
4486
4487  tree v4si_ftype_v16qi_v16qi
4488    = build_function_type (V4SI_type_node,
4489			   tree_cons (NULL_TREE, V16QI_type_node,
4490				      tree_cons (NULL_TREE, V16QI_type_node,
4491						 endlink)));
4492
4493  tree v4si_ftype_v8hi_v4si
4494    = build_function_type (V4SI_type_node,
4495			   tree_cons (NULL_TREE, V8HI_type_node,
4496				      tree_cons (NULL_TREE, V4SI_type_node,
4497						 endlink)));
4498
4499  tree v4si_ftype_v8hi
4500    = build_function_type (V4SI_type_node,
4501			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4502
4503  tree int_ftype_v4si_v4si
4504    = build_function_type (integer_type_node,
4505			   tree_cons (NULL_TREE, V4SI_type_node,
4506				      tree_cons (NULL_TREE, V4SI_type_node,
4507						 endlink)));
4508
4509  tree int_ftype_v4sf_v4sf
4510    = build_function_type (integer_type_node,
4511			   tree_cons (NULL_TREE, V4SF_type_node,
4512				      tree_cons (NULL_TREE, V4SF_type_node,
4513						 endlink)));
4514
4515  tree int_ftype_v16qi_v16qi
4516    = build_function_type (integer_type_node,
4517			   tree_cons (NULL_TREE, V16QI_type_node,
4518				      tree_cons (NULL_TREE, V16QI_type_node,
4519						 endlink)));
4520
4521  tree int_ftype_int_v4si_v4si
4522    = build_function_type
4523    (integer_type_node,
4524     tree_cons (NULL_TREE, integer_type_node,
4525		tree_cons (NULL_TREE, V4SI_type_node,
4526			   tree_cons (NULL_TREE, V4SI_type_node,
4527				      endlink))));
4528
4529  tree int_ftype_int_v4sf_v4sf
4530    = build_function_type
4531    (integer_type_node,
4532     tree_cons (NULL_TREE, integer_type_node,
4533		tree_cons (NULL_TREE, V4SF_type_node,
4534			   tree_cons (NULL_TREE, V4SF_type_node,
4535				      endlink))));
4536
4537  tree int_ftype_int_v8hi_v8hi
4538    = build_function_type
4539    (integer_type_node,
4540     tree_cons (NULL_TREE, integer_type_node,
4541		 tree_cons (NULL_TREE, V8HI_type_node,
4542			    tree_cons (NULL_TREE, V8HI_type_node,
4543				       endlink))));
4544
4545  tree int_ftype_int_v16qi_v16qi
4546    = build_function_type
4547    (integer_type_node,
4548     tree_cons (NULL_TREE, integer_type_node,
4549		tree_cons (NULL_TREE, V16QI_type_node,
4550			   tree_cons (NULL_TREE, V16QI_type_node,
4551				      endlink))));
4552
4553  tree v16qi_ftype_int_pvoid
4554    = build_function_type (V16QI_type_node,
4555			   tree_cons (NULL_TREE, integer_type_node,
4556				      tree_cons (NULL_TREE, pvoid_type_node,
4557						 endlink)));
4558
4559  tree v4si_ftype_int_pvoid
4560    = build_function_type (V4SI_type_node,
4561			   tree_cons (NULL_TREE, integer_type_node,
4562				      tree_cons (NULL_TREE, pvoid_type_node,
4563						 endlink)));
4564
4565  tree v8hi_ftype_int_pvoid
4566    = build_function_type (V8HI_type_node,
4567			   tree_cons (NULL_TREE, integer_type_node,
4568				      tree_cons (NULL_TREE, pvoid_type_node,
4569						 endlink)));
4570
4571  tree int_ftype_v8hi_v8hi
4572    = build_function_type (integer_type_node,
4573			   tree_cons (NULL_TREE, V8HI_type_node,
4574				      tree_cons (NULL_TREE, V8HI_type_node,
4575						 endlink)));
4576
4577  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4578  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4579  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4580  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4581  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4582  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4583  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4584  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4585  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4586  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4587  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4588  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4589  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4590  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4591  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4592  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4593  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4594  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4595  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4596  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4597  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4598  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4599  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4600  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4601
4602  /* Add the simple ternary operators.  */
4603  d = (struct builtin_description *) bdesc_3arg;
4604  for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4605    {
4606
4607      enum machine_mode mode0, mode1, mode2, mode3;
4608      tree type;
4609
4610      if (d->name == 0)
4611	continue;
4612
4613      mode0 = insn_data[d->icode].operand[0].mode;
4614      mode1 = insn_data[d->icode].operand[1].mode;
4615      mode2 = insn_data[d->icode].operand[2].mode;
4616      mode3 = insn_data[d->icode].operand[3].mode;
4617
4618      /* When all four are of the same mode.  */
4619      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4620	{
4621	  switch (mode0)
4622	    {
4623	    case V4SImode:
4624	      type = v4si_ftype_v4si_v4si_v4si;
4625	      break;
4626	    case V4SFmode:
4627	      type = v4sf_ftype_v4sf_v4sf_v4sf;
4628	      break;
4629	    case V8HImode:
4630	      type = v8hi_ftype_v8hi_v8hi_v8hi;
4631	      break;
4632	    case V16QImode:
4633	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4634	      break;
4635	    default:
4636	      abort();
4637	    }
4638	}
4639      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4640        {
4641	  switch (mode0)
4642	    {
4643	    case V4SImode:
4644	      type = v4si_ftype_v4si_v4si_v16qi;
4645	      break;
4646	    case V4SFmode:
4647	      type = v4sf_ftype_v4sf_v4sf_v16qi;
4648	      break;
4649	    case V8HImode:
4650	      type = v8hi_ftype_v8hi_v8hi_v16qi;
4651	      break;
4652	    case V16QImode:
4653	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4654	      break;
4655	    default:
4656	      abort();
4657	    }
4658	}
4659      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4660	       && mode3 == V4SImode)
4661	type = v4si_ftype_v16qi_v16qi_v4si;
4662      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4663	       && mode3 == V4SImode)
4664	type = v4si_ftype_v8hi_v8hi_v4si;
4665      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4666	       && mode3 == V4SImode)
4667	type = v4sf_ftype_v4sf_v4sf_v4si;
4668
4669      /* vchar, vchar, vchar, 4 bit literal.  */
4670      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4671	       && mode3 == QImode)
4672	type = v16qi_ftype_v16qi_v16qi_char;
4673
4674      /* vshort, vshort, vshort, 4 bit literal.  */
4675      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4676	       && mode3 == QImode)
4677	type = v8hi_ftype_v8hi_v8hi_char;
4678
4679      /* vint, vint, vint, 4 bit literal.  */
4680      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4681	       && mode3 == QImode)
4682	type = v4si_ftype_v4si_v4si_char;
4683
4684      /* vfloat, vfloat, vfloat, 4 bit literal.  */
4685      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4686	       && mode3 == QImode)
4687	type = v4sf_ftype_v4sf_v4sf_char;
4688
4689      else
4690	abort ();
4691
4692      def_builtin (d->mask, d->name, type, d->code);
4693    }
4694
4695  /* Add the DST variants.  */
4696  d = (struct builtin_description *) bdesc_dst;
4697  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4698    def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4699
4700  /* Initialize the predicates.  */
4701  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4702  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4703    {
4704      enum machine_mode mode1;
4705      tree type;
4706
4707      mode1 = insn_data[dp->icode].operand[1].mode;
4708
4709      switch (mode1)
4710	{
4711	case V4SImode:
4712	  type = int_ftype_int_v4si_v4si;
4713	  break;
4714	case V8HImode:
4715	  type = int_ftype_int_v8hi_v8hi;
4716	  break;
4717	case V16QImode:
4718	  type = int_ftype_int_v16qi_v16qi;
4719	  break;
4720	case V4SFmode:
4721	  type = int_ftype_int_v4sf_v4sf;
4722	  break;
4723	default:
4724	  abort ();
4725	}
4726
4727      def_builtin (dp->mask, dp->name, type, dp->code);
4728    }
4729
4730  /* Add the simple binary operators.  */
4731  d = (struct builtin_description *) bdesc_2arg;
4732  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4733    {
4734      enum machine_mode mode0, mode1, mode2;
4735      tree type;
4736
4737      if (d->name == 0)
4738	continue;
4739
4740      mode0 = insn_data[d->icode].operand[0].mode;
4741      mode1 = insn_data[d->icode].operand[1].mode;
4742      mode2 = insn_data[d->icode].operand[2].mode;
4743
4744      /* When all three operands are of the same mode.  */
4745      if (mode0 == mode1 && mode1 == mode2)
4746	{
4747	  switch (mode0)
4748	    {
4749	    case V4SFmode:
4750	      type = v4sf_ftype_v4sf_v4sf;
4751	      break;
4752	    case V4SImode:
4753	      type = v4si_ftype_v4si_v4si;
4754	      break;
4755	    case V16QImode:
4756	      type = v16qi_ftype_v16qi_v16qi;
4757	      break;
4758	    case V8HImode:
4759	      type = v8hi_ftype_v8hi_v8hi;
4760	      break;
4761	    default:
4762	      abort ();
4763	    }
4764	}
4765
4766      /* A few other combos we really don't want to do manually.  */
4767
4768      /* vint, vfloat, vfloat.  */
4769      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4770	type = v4si_ftype_v4sf_v4sf;
4771
4772      /* vshort, vchar, vchar.  */
4773      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4774	type = v8hi_ftype_v16qi_v16qi;
4775
4776      /* vint, vshort, vshort.  */
4777      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4778	type = v4si_ftype_v8hi_v8hi;
4779
4780      /* vshort, vint, vint.  */
4781      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4782	type = v8hi_ftype_v4si_v4si;
4783
4784      /* vchar, vshort, vshort.  */
4785      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4786	type = v16qi_ftype_v8hi_v8hi;
4787
4788      /* vint, vchar, vint.  */
4789      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4790	type = v4si_ftype_v16qi_v4si;
4791
4792      /* vint, vchar, vchar.  */
4793      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4794	type = v4si_ftype_v16qi_v16qi;
4795
4796      /* vint, vshort, vint.  */
4797      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4798	type = v4si_ftype_v8hi_v4si;
4799
4800      /* vint, vint, 5 bit literal.  */
4801      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4802	type = v4si_ftype_v4si_char;
4803
4804      /* vshort, vshort, 5 bit literal.  */
4805      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4806	type = v8hi_ftype_v8hi_char;
4807
4808      /* vchar, vchar, 5 bit literal.  */
4809      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4810	type = v16qi_ftype_v16qi_char;
4811
4812      /* vfloat, vint, 5 bit literal.  */
4813      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4814	type = v4sf_ftype_v4si_char;
4815
4816      /* vint, vfloat, 5 bit literal.  */
4817      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4818	type = v4si_ftype_v4sf_char;
4819
4820      /* int, x, x.  */
4821      else if (mode0 == SImode)
4822	{
4823	  switch (mode1)
4824	    {
4825	    case V4SImode:
4826	      type = int_ftype_v4si_v4si;
4827	      break;
4828	    case V4SFmode:
4829	      type = int_ftype_v4sf_v4sf;
4830	      break;
4831	    case V16QImode:
4832	      type = int_ftype_v16qi_v16qi;
4833	      break;
4834	    case V8HImode:
4835	      type = int_ftype_v8hi_v8hi;
4836	      break;
4837	    default:
4838	      abort ();
4839	    }
4840	}
4841
4842      else
4843	abort ();
4844
4845      def_builtin (d->mask, d->name, type, d->code);
4846    }
4847
4848  /* Initialize the abs* operators.  */
4849  d = (struct builtin_description *) bdesc_abs;
4850  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4851    {
4852      enum machine_mode mode0;
4853      tree type;
4854
4855      mode0 = insn_data[d->icode].operand[0].mode;
4856
4857      switch (mode0)
4858	{
4859	case V4SImode:
4860	  type = v4si_ftype_v4si;
4861	  break;
4862	case V8HImode:
4863	  type = v8hi_ftype_v8hi;
4864	  break;
4865	case V16QImode:
4866	  type = v16qi_ftype_v16qi;
4867	  break;
4868	case V4SFmode:
4869	  type = v4sf_ftype_v4sf;
4870	  break;
4871	default:
4872	  abort ();
4873	}
4874
4875      def_builtin (d->mask, d->name, type, d->code);
4876    }
4877
4878  /* Add the simple unary operators.  */
4879  d = (struct builtin_description *) bdesc_1arg;
4880  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4881    {
4882      enum machine_mode mode0, mode1;
4883      tree type;
4884
4885      if (d->name == 0)
4886	continue;
4887
4888      mode0 = insn_data[d->icode].operand[0].mode;
4889      mode1 = insn_data[d->icode].operand[1].mode;
4890
4891      if (mode0 == V4SImode && mode1 == QImode)
4892        type = v4si_ftype_char;
4893      else if (mode0 == V8HImode && mode1 == QImode)
4894        type = v8hi_ftype_char;
4895      else if (mode0 == V16QImode && mode1 == QImode)
4896        type = v16qi_ftype_char;
4897      else if (mode0 == V4SFmode && mode1 == V4SFmode)
4898	type = v4sf_ftype_v4sf;
4899      else if (mode0 == V8HImode && mode1 == V16QImode)
4900	type = v8hi_ftype_v16qi;
4901      else if (mode0 == V4SImode && mode1 == V8HImode)
4902	type = v4si_ftype_v8hi;
4903      else
4904	abort ();
4905
4906      def_builtin (d->mask, d->name, type, d->code);
4907    }
4908}
4909
4910
4911/* Expand a block move operation, and return 1 if successful.  Return 0
4912   if we should let the compiler generate normal code.
4913
4914   operands[0] is the destination
4915   operands[1] is the source
4916   operands[2] is the length
4917   operands[3] is the alignment */
4918
4919#define MAX_MOVE_REG 4
4920
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Emit the widest available move for each chunk; the branch order
	 below matters, since the earlier (wider) alternatives are
	 preferred whenever their register/alignment requirements hold.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Either a string-move generator (BLKmode, takes a length and
	     alignment) or a plain register-move generator is chosen; the
	     union keeps a single call site for each shape.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register string moves clobber consecutive GPRs
	     starting at r5, so every register they would touch must be
	     available (not fixed).  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  /* Form MEMs for the current chunk of source and destination.  */
	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar chunk: load into a fresh pseudo, then store.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Batch up to MAX_MOVE_REG loads before emitting their stores,
	 so the loads can overlap instead of serializing load/store
	 pairs.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the pending stores once the batch is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores left over from a partial batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
5134
5135
5136/* Return 1 if OP is a load multiple operation.  It is known to be a
5137   PARALLEL and the first section will be tested.  */
5138
5139int
5140load_multiple_operation (op, mode)
5141     rtx op;
5142     enum machine_mode mode ATTRIBUTE_UNUSED;
5143{
5144  int count = XVECLEN (op, 0);
5145  unsigned int dest_regno;
5146  rtx src_addr;
5147  int i;
5148
5149  /* Perform a quick check so we don't blow up below.  */
5150  if (count <= 1
5151      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5152      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5153      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5154    return 0;
5155
5156  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5157  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5158
5159  for (i = 1; i < count; i++)
5160    {
5161      rtx elt = XVECEXP (op, 0, i);
5162
5163      if (GET_CODE (elt) != SET
5164	  || GET_CODE (SET_DEST (elt)) != REG
5165	  || GET_MODE (SET_DEST (elt)) != SImode
5166	  || REGNO (SET_DEST (elt)) != dest_regno + i
5167	  || GET_CODE (SET_SRC (elt)) != MEM
5168	  || GET_MODE (SET_SRC (elt)) != SImode
5169	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5170	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5171	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5172	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5173	return 0;
5174    }
5175
5176  return 1;
5177}
5178
5179/* Similar, but tests for store multiple.  Here, the second vector element
5180   is a CLOBBER.  It will be tested later.  */
5181
5182int
5183store_multiple_operation (op, mode)
5184     rtx op;
5185     enum machine_mode mode ATTRIBUTE_UNUSED;
5186{
5187  int count = XVECLEN (op, 0) - 1;
5188  unsigned int src_regno;
5189  rtx dest_addr;
5190  int i;
5191
5192  /* Perform a quick check so we don't blow up below.  */
5193  if (count <= 1
5194      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5195      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5196      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5197    return 0;
5198
5199  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5200  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5201
5202  for (i = 1; i < count; i++)
5203    {
5204      rtx elt = XVECEXP (op, 0, i + 1);
5205
5206      if (GET_CODE (elt) != SET
5207	  || GET_CODE (SET_SRC (elt)) != REG
5208	  || GET_MODE (SET_SRC (elt)) != SImode
5209	  || REGNO (SET_SRC (elt)) != src_regno + i
5210	  || GET_CODE (SET_DEST (elt)) != MEM
5211	  || GET_MODE (SET_DEST (elt)) != SImode
5212	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5213	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5214	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5215	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5216	return 0;
5217    }
5218
5219  return 1;
5220}
5221
5222/* Return 1 for a parallel vrsave operation.  */
5223
5224int
5225vrsave_operation (op, mode)
5226     rtx op;
5227     enum machine_mode mode ATTRIBUTE_UNUSED;
5228{
5229  int count = XVECLEN (op, 0);
5230  unsigned int dest_regno, src_regno;
5231  int i;
5232
5233  if (count <= 1
5234      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5235      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5236      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5237    return 0;
5238
5239  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5240  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5241
5242  if (dest_regno != VRSAVE_REGNO
5243      && src_regno != VRSAVE_REGNO)
5244    return 0;
5245
5246  for (i = 1; i < count; i++)
5247    {
5248      rtx elt = XVECEXP (op, 0, i);
5249
5250      if (GET_CODE (elt) != CLOBBER
5251	  && GET_CODE (elt) != SET)
5252	return 0;
5253    }
5254
5255  return 1;
5256}
5257
5258/* Return 1 for an PARALLEL suitable for mtcrf.  */
5259
5260int
5261mtcrf_operation (op, mode)
5262     rtx op;
5263     enum machine_mode mode ATTRIBUTE_UNUSED;
5264{
5265  int count = XVECLEN (op, 0);
5266  int i;
5267  rtx src_reg;
5268
5269  /* Perform a quick check so we don't blow up below.  */
5270  if (count < 1
5271      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5272      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5273      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5274    return 0;
5275  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5276
5277  if (GET_CODE (src_reg) != REG
5278      || GET_MODE (src_reg) != SImode
5279      || ! INT_REGNO_P (REGNO (src_reg)))
5280    return 0;
5281
5282  for (i = 0; i < count; i++)
5283    {
5284      rtx exp = XVECEXP (op, 0, i);
5285      rtx unspec;
5286      int maskval;
5287
5288      if (GET_CODE (exp) != SET
5289	  || GET_CODE (SET_DEST (exp)) != REG
5290	  || GET_MODE (SET_DEST (exp)) != CCmode
5291	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5292	return 0;
5293      unspec = SET_SRC (exp);
5294      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5295
5296      if (GET_CODE (unspec) != UNSPEC
5297	  || XINT (unspec, 1) != 20
5298	  || XVECLEN (unspec, 0) != 2
5299	  || XVECEXP (unspec, 0, 0) != src_reg
5300	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5301	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5302	return 0;
5303    }
5304  return 1;
5305}
5306
5307/* Return 1 for an PARALLEL suitable for lmw.  */
5308
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* The vector must describe exactly registers dest_regno .. 31,
     i.e. the last register loaded is always r31.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register plus constant
     offset.  A plain (reg) address has offset 0; base register 0 is
     rejected in that case.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element i must be (set (reg:SI dest_regno+i)
     (mem:SI (base_regno + offset + 4*i))) — consecutive registers
     loaded from consecutive words.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5384
/* Return 1 for a PARALLEL suitable for stmw.  */
5386
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* The vector must describe exactly registers src_regno .. 31,
     i.e. the last register stored is always r31.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register plus constant
     offset.  A plain (reg) address has offset 0; base register 0 is
     rejected in that case.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element i must be (set (mem:SI (base_regno + offset + 4*i))
     (reg:SI src_regno+i)) — consecutive registers stored to
     consecutive words.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5462
5463/* A validation routine: say whether CODE, a condition code, and MODE
5464   match.  The other alternatives either don't make sense or should
5465   never be generated.  */
5466
5467static void
5468validate_condition_mode (code, mode)
5469     enum rtx_code code;
5470     enum machine_mode mode;
5471{
5472  if (GET_RTX_CLASS (code) != '<'
5473      || GET_MODE_CLASS (mode) != MODE_CC)
5474    abort ();
5475
5476  /* These don't make sense.  */
5477  if ((code == GT || code == LT || code == GE || code == LE)
5478      && mode == CCUNSmode)
5479    abort ();
5480
5481  if ((code == GTU || code == LTU || code == GEU || code == LEU)
5482      && mode != CCUNSmode)
5483    abort ();
5484
5485  if (mode != CCFPmode
5486      && (code == ORDERED || code == UNORDERED
5487	  || code == UNEQ || code == LTGT
5488	  || code == UNGT || code == UNLT
5489	  || code == UNGE || code == UNLE))
5490    abort ();
5491
5492  /* These should never be generated except for
5493     flag_unsafe_math_optimizations.  */
5494  if (mode == CCFPmode
5495      && ! flag_unsafe_math_optimizations
5496      && (code == LE || code == GE
5497	  || code == UNEQ || code == LTGT
5498	  || code == UNGT || code == UNLT))
5499    abort ();
5500
5501  /* These are invalid; the information is not there.  */
5502  if (mode == CCEQmode
5503      && code != EQ && code != NE)
5504    abort ();
5505}
5506
5507/* Return 1 if OP is a comparison operation that is valid for a branch insn.
5508   We only check the opcode against the mode of the CC value here.  */
5509
5510int
5511branch_comparison_operator (op, mode)
5512     rtx op;
5513     enum machine_mode mode ATTRIBUTE_UNUSED;
5514{
5515  enum rtx_code code = GET_CODE (op);
5516  enum machine_mode cc_mode;
5517
5518  if (GET_RTX_CLASS (code) != '<')
5519    return 0;
5520
5521  cc_mode = GET_MODE (XEXP (op, 0));
5522  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5523    return 0;
5524
5525  validate_condition_mode (code, cc_mode);
5526
5527  return 1;
5528}
5529
5530/* Return 1 if OP is a comparison operation that is valid for a branch
5531   insn and which is true if the corresponding bit in the CC register
5532   is set.  */
5533
5534int
5535branch_positive_comparison_operator (op, mode)
5536     rtx op;
5537     enum machine_mode mode;
5538{
5539  enum rtx_code code;
5540
5541  if (! branch_comparison_operator (op, mode))
5542    return 0;
5543
5544  code = GET_CODE (op);
5545  return (code == EQ || code == LT || code == GT
5546	  || code == LTU || code == GTU
5547	  || code == UNORDERED);
5548}
5549
5550/* Return 1 if OP is a comparison operation that is valid for an scc insn.
5551   We check the opcode against the mode of the CC value and disallow EQ or
5552   NE comparisons for integers.  */
5553
5554int
5555scc_comparison_operator (op, mode)
5556     rtx op;
5557     enum machine_mode mode;
5558{
5559  enum rtx_code code = GET_CODE (op);
5560  enum machine_mode cc_mode;
5561
5562  if (GET_MODE (op) != mode && mode != VOIDmode)
5563    return 0;
5564
5565  if (GET_RTX_CLASS (code) != '<')
5566    return 0;
5567
5568  cc_mode = GET_MODE (XEXP (op, 0));
5569  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5570    return 0;
5571
5572  validate_condition_mode (code, cc_mode);
5573
5574  if (code == NE && cc_mode != CCFPmode)
5575    return 0;
5576
5577  return 1;
5578}
5579
5580int
5581trap_comparison_operator (op, mode)
5582    rtx op;
5583    enum machine_mode mode;
5584{
5585  if (mode != VOIDmode && mode != GET_MODE (op))
5586    return 0;
5587  return GET_RTX_CLASS (GET_CODE (op)) == '<';
5588}
5589
5590int
5591boolean_operator (op, mode)
5592    rtx op;
5593    enum machine_mode mode ATTRIBUTE_UNUSED;
5594{
5595  enum rtx_code code = GET_CODE (op);
5596  return (code == AND || code == IOR || code == XOR);
5597}
5598
5599int
5600boolean_or_operator (op, mode)
5601    rtx op;
5602    enum machine_mode mode ATTRIBUTE_UNUSED;
5603{
5604  enum rtx_code code = GET_CODE (op);
5605  return (code == IOR || code == XOR);
5606}
5607
5608int
5609min_max_operator (op, mode)
5610    rtx op;
5611    enum machine_mode mode ATTRIBUTE_UNUSED;
5612{
5613  enum rtx_code code = GET_CODE (op);
5614  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5615}
5616
5617/* Return 1 if ANDOP is a mask that has no bits on that are not in the
5618   mask required to convert the result of a rotate insn into a shift
5619   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
5620
5621int
5622includes_lshift_p (shiftop, andop)
5623     rtx shiftop;
5624     rtx andop;
5625{
5626  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5627
5628  shift_mask <<= INTVAL (shiftop);
5629
5630  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5631}
5632
5633/* Similar, but for right shift.  */
5634
5635int
5636includes_rshift_p (shiftop, andop)
5637     rtx shiftop;
5638     rtx andop;
5639{
5640  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5641
5642  shift_mask >>= INTVAL (shiftop);
5643
5644  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5645}
5646
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */
5650
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* An all-zero or all-one mask has no transitions and cannot
	 match.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a host with HOST_WIDE_INT narrower than 64 bits, a 64-bit
	 mask arrives as a CONST_DOUBLE whose low and high words are
	 examined separately.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one masks, as above.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high word; the whole
	     transition pattern must be found there, and the shift
	     count must reach into that word.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the low transition is in the low word; same
	 lsb-versus-shift-mask test as the CONST_INT case.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones extends into the high word; it must end
	     with ones all the way up.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5741
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
5745
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a host with HOST_WIDE_INT narrower than 64 bits, a 64-bit
	 mask arrives as a CONST_DOUBLE whose low and high words are
	 examined separately.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lies entirely in the high word; apply the
		 same lsb-coverage test there.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* The mask reaches into the low word, so the high word must
	     be all ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5813
5814/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5815   for lfq and stfq insns.
5816
5817   Note reg1 and reg2 *must* be hard registers.  To be sure we will
5818   abort if we are passed pseudo registers.  */
5819
5820int
5821registers_ok_for_quad_peep (reg1, reg2)
5822     rtx reg1, reg2;
5823{
5824  /* We might have been passed a SUBREG.  */
5825  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5826    return 0;
5827
5828  return (REGNO (reg1) == REGNO (reg2) - 1);
5829}
5830
5831/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5832   addr1 and addr2 must be in consecutive memory locations
5833   (addr2 == addr1 + 8).  */
5834
5835int
5836addrs_ok_for_quad_peep (addr1, addr2)
5837     rtx addr1;
5838     rtx addr2;
5839{
5840  unsigned int reg1;
5841  int offset1;
5842
5843  /* Extract an offset (if used) from the first addr.  */
5844  if (GET_CODE (addr1) == PLUS)
5845    {
5846      /* If not a REG, return zero.  */
5847      if (GET_CODE (XEXP (addr1, 0)) != REG)
5848	return 0;
5849      else
5850	{
5851          reg1 = REGNO (XEXP (addr1, 0));
5852	  /* The offset must be constant!  */
5853	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5854            return 0;
5855          offset1 = INTVAL (XEXP (addr1, 1));
5856	}
5857    }
5858  else if (GET_CODE (addr1) != REG)
5859    return 0;
5860  else
5861    {
5862      reg1 = REGNO (addr1);
5863      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
5864      offset1 = 0;
5865    }
5866
5867  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
5868  if (GET_CODE (addr2) != PLUS)
5869    return 0;
5870
5871  if (GET_CODE (XEXP (addr2, 0)) != REG
5872      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5873    return 0;
5874
5875  if (reg1 != REGNO (XEXP (addr2, 0)))
5876    return 0;
5877
5878  /* The offset for the second addr must be 8 more than the first addr.  */
5879  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5880    return 0;
5881
5882  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
5883     instructions.  */
5884  return 1;
5885}
5886
5887/* Return the register class of a scratch register needed to copy IN into
5888   or out of a register in CLASS in MODE.  If it can be done directly,
5889   NO_REGS is returned.  */
5890
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  /* Hard register number of IN, or -1 if IN is not (known to be) a
     hard register.  */
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number: resolve pseudos and SUBREGs
     through true_regnum, falling back to -1 when no hard register has
     been assigned.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
5959
5960/* Given a comparison operation, return the bit number in CCR to test.  We
5961   know this is a valid comparison.
5962
5963   SCC_P is 1 if this is for an scc.  That means that %D will have been
5964   used instead of %C, so the bits will be in different places.
5965
5966   Return -1 if OP isn't a valid comparison for some reason.  */
5967
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  /* The comparison must be against a CR hard register; anything else
     is a malformed operand and we abort.  */
  reg = XEXP (op, 0);

  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive bits in CCR; base_bit is
     the first bit of this field.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* Within a field the bit order is LT, GT, EQ, SO/UN (offsets 0-3).  */
  switch (code)
    {
    case NE:
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
6020
6021/* Return the GOT register.  */
6022
6023struct rtx_def *
6024rs6000_got_register (value)
6025     rtx value ATTRIBUTE_UNUSED;
6026{
6027  /* The second flow pass currently (June 1999) can't update
6028     regs_ever_live without disturbing other parts of the compiler, so
6029     update it here to make the prolog/epilogue code happy.  */
6030  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6031    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6032
6033  current_function_uses_pic_offset_table = 1;
6034
6035  return pic_offset_table_rtx;
6036}
6037
6038/* Functions to init, mark and free struct machine_function.
6039   These will be called, via pointer variables,
6040   from push_function_context and pop_function_context.  */
6041
6042static void
6043rs6000_init_machine_status (p)
6044     struct function *p;
6045{
6046  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6047}
6048
6049static void
6050rs6000_free_machine_status (p)
6051     struct function *p;
6052{
6053  if (p->machine == NULL)
6054    return;
6055
6056  free (p->machine);
6057  p->machine = NULL;
6058}
6059
6060
6061/* Print an operand.  Recognize special options, documented below.  */
6062
6063#if TARGET_ELF
6064#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6065#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6066#else
6067#define SMALL_DATA_RELOC "sda21"
6068#define SMALL_DATA_REG 0
6069#endif
6070
6071void
6072print_operand (file, x, code)
6073    FILE *file;
6074    rtx x;
6075    int code;
6076{
6077  int i;
6078  HOST_WIDE_INT val;
6079
6080  /* These macros test for integers and extract the low-order bits.  */
6081#define INT_P(X)  \
6082((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
6083 && GET_MODE (X) == VOIDmode)
6084
6085#define INT_LOWPART(X) \
6086  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6087
6088  switch (code)
6089    {
6090    case '.':
6091      /* Write out an instruction after the call which may be replaced
6092	 with glue code by the loader.  This depends on the AIX version.  */
6093      asm_fprintf (file, RS6000_CALL_GLUE);
6094      return;
6095
6096      /* %a is output_address.  */
6097
6098    case 'A':
6099      /* If X is a constant integer whose low-order 5 bits are zero,
6100	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
6101	 in the AIX assembler where "sri" with a zero shift count
6102	 writes a trash instruction.  */
6103      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6104	putc ('l', file);
6105      else
6106	putc ('r', file);
6107      return;
6108
6109    case 'b':
6110      /* If constant, low-order 16 bits of constant, unsigned.
6111	 Otherwise, write normally.  */
6112      if (INT_P (x))
6113	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6114      else
6115	print_operand (file, x, 0);
6116      return;
6117
6118    case 'B':
6119      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6120	 for 64-bit mask direction.  */
6121      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6122      return;
6123
6124      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6125	 output_operand.  */
6126
6127    case 'D':
6128      /* There used to be a comment for 'C' reading "This is an
6129	   optional cror needed for certain floating-point
6130	   comparisons.  Otherwise write nothing."  */
6131
6132      /* Similar, except that this is for an scc, so we must be able to
6133	 encode the test in a single bit that is one.  We do the above
6134	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
6135      if (GET_CODE (x) == LE || GET_CODE (x) == GE
6136	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6137	{
6138	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6139
6140	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6141		   base_bit + 2,
6142		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6143	}
6144
6145      else if (GET_CODE (x) == NE)
6146	{
6147	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6148
6149	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6150		   base_bit + 2, base_bit + 2);
6151	}
6152      return;
6153
6154    case 'E':
6155      /* X is a CR register.  Print the number of the EQ bit of the CR */
6156      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6157	output_operand_lossage ("invalid %%E value");
6158      else
6159	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6160      return;
6161
6162    case 'f':
6163      /* X is a CR register.  Print the shift count needed to move it
6164	 to the high-order four bits.  */
6165      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6166	output_operand_lossage ("invalid %%f value");
6167      else
6168	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6169      return;
6170
6171    case 'F':
6172      /* Similar, but print the count for the rotate in the opposite
6173	 direction.  */
6174      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6175	output_operand_lossage ("invalid %%F value");
6176      else
6177	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6178      return;
6179
6180    case 'G':
6181      /* X is a constant integer.  If it is negative, print "m",
6182	 otherwise print "z".  This is to make a aze or ame insn.  */
6183      if (GET_CODE (x) != CONST_INT)
6184	output_operand_lossage ("invalid %%G value");
6185      else if (INTVAL (x) >= 0)
6186	putc ('z', file);
6187      else
6188	putc ('m', file);
6189      return;
6190
6191    case 'h':
6192      /* If constant, output low-order five bits.  Otherwise, write
6193	 normally.  */
6194      if (INT_P (x))
6195	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6196      else
6197	print_operand (file, x, 0);
6198      return;
6199
6200    case 'H':
6201      /* If constant, output low-order six bits.  Otherwise, write
6202	 normally.  */
6203      if (INT_P (x))
6204	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6205      else
6206	print_operand (file, x, 0);
6207      return;
6208
6209    case 'I':
6210      /* Print `i' if this is a constant, else nothing.  */
6211      if (INT_P (x))
6212	putc ('i', file);
6213      return;
6214
6215    case 'j':
6216      /* Write the bit number in CCR for jump.  */
6217      i = ccr_bit (x, 0);
6218      if (i == -1)
6219	output_operand_lossage ("invalid %%j code");
6220      else
6221	fprintf (file, "%d", i);
6222      return;
6223
6224    case 'J':
6225      /* Similar, but add one for shift count in rlinm for scc and pass
6226	 scc flag to `ccr_bit'.  */
6227      i = ccr_bit (x, 1);
6228      if (i == -1)
6229	output_operand_lossage ("invalid %%J code");
6230      else
6231	/* If we want bit 31, write a shift count of zero, not 32.  */
6232	fprintf (file, "%d", i == 31 ? 0 : i + 1);
6233      return;
6234
6235    case 'k':
6236      /* X must be a constant.  Write the 1's complement of the
6237	 constant.  */
6238      if (! INT_P (x))
6239	output_operand_lossage ("invalid %%k value");
6240      else
6241	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6242      return;
6243
6244    case 'K':
6245      /* X must be a symbolic constant on ELF.  Write an
6246	 expression suitable for an 'addi' that adds in the low 16
6247	 bits of the MEM.  */
6248      if (GET_CODE (x) != CONST)
6249	{
6250	  print_operand_address (file, x);
6251	  fputs ("@l", file);
6252	}
6253      else
6254	{
6255	  if (GET_CODE (XEXP (x, 0)) != PLUS
6256	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6257		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6258	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6259	    output_operand_lossage ("invalid %%K value");
6260	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
6261	  fputs ("@l", file);
6262	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6263	}
6264      return;
6265
6266      /* %l is output_asm_label.  */
6267
6268    case 'L':
6269      /* Write second word of DImode or DFmode reference.  Works on register
6270	 or non-indexed memory only.  */
6271      if (GET_CODE (x) == REG)
6272	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6273      else if (GET_CODE (x) == MEM)
6274	{
6275	  /* Handle possible auto-increment.  Since it is pre-increment and
6276	     we have already done it, we can just use an offset of word.  */
6277	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6278	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6279	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6280					   UNITS_PER_WORD));
6281	  else
6282	    output_address (XEXP (adjust_address_nv (x, SImode,
6283						     UNITS_PER_WORD),
6284				  0));
6285
6286	  if (small_data_operand (x, GET_MODE (x)))
6287	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6288		     reg_names[SMALL_DATA_REG]);
6289	}
6290      return;
6291
6292    case 'm':
6293      /* MB value for a mask operand.  */
6294      if (! mask_operand (x, SImode))
6295	output_operand_lossage ("invalid %%m value");
6296
6297      val = INT_LOWPART (x);
6298
6299      /* If the high bit is set and the low bit is not, the value is zero.
6300	 If the high bit is zero, the value is the first 1 bit we find from
6301	 the left.  */
6302      if ((val & 0x80000000) && ((val & 1) == 0))
6303	{
6304	  putc ('0', file);
6305	  return;
6306	}
6307      else if ((val & 0x80000000) == 0)
6308	{
6309	  for (i = 1; i < 32; i++)
6310	    if ((val <<= 1) & 0x80000000)
6311	      break;
6312	  fprintf (file, "%d", i);
6313	  return;
6314	}
6315
6316      /* Otherwise, look for the first 0 bit from the right.  The result is its
6317	 number plus 1. We know the low-order bit is one.  */
6318      for (i = 0; i < 32; i++)
6319	if (((val >>= 1) & 1) == 0)
6320	  break;
6321
6322      /* If we ended in ...01, i would be 0.  The correct value is 31, so
6323	 we want 31 - i.  */
6324      fprintf (file, "%d", 31 - i);
6325      return;
6326
6327    case 'M':
6328      /* ME value for a mask operand.  */
6329      if (! mask_operand (x, SImode))
6330	output_operand_lossage ("invalid %%M value");
6331
6332      val = INT_LOWPART (x);
6333
6334      /* If the low bit is set and the high bit is not, the value is 31.
6335	 If the low bit is zero, the value is the first 1 bit we find from
6336	 the right.  */
6337      if ((val & 1) && ((val & 0x80000000) == 0))
6338	{
6339	  fputs ("31", file);
6340	  return;
6341	}
6342      else if ((val & 1) == 0)
6343	{
6344	  for (i = 0; i < 32; i++)
6345	    if ((val >>= 1) & 1)
6346	      break;
6347
6348	  /* If we had ....10, i would be 0.  The result should be
6349	     30, so we need 30 - i.  */
6350	  fprintf (file, "%d", 30 - i);
6351	  return;
6352	}
6353
6354      /* Otherwise, look for the first 0 bit from the left.  The result is its
6355	 number minus 1. We know the high-order bit is one.  */
6356      for (i = 0; i < 32; i++)
6357	if (((val <<= 1) & 0x80000000) == 0)
6358	  break;
6359
6360      fprintf (file, "%d", i);
6361      return;
6362
6363      /* %n outputs the negative of its operand.  */
6364
6365    case 'N':
6366      /* Write the number of elements in the vector times 4.  */
6367      if (GET_CODE (x) != PARALLEL)
6368	output_operand_lossage ("invalid %%N value");
6369      else
6370	fprintf (file, "%d", XVECLEN (x, 0) * 4);
6371      return;
6372
6373    case 'O':
6374      /* Similar, but subtract 1 first.  */
6375      if (GET_CODE (x) != PARALLEL)
6376	output_operand_lossage ("invalid %%O value");
6377      else
6378	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6379      return;
6380
6381    case 'p':
6382      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
6383      if (! INT_P (x)
6384	  || INT_LOWPART (x) < 0
6385	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
6386	output_operand_lossage ("invalid %%p value");
6387      else
6388	fprintf (file, "%d", i);
6389      return;
6390
6391    case 'P':
6392      /* The operand must be an indirect memory reference.  The result
6393	 is the register number.  */
6394      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6395	  || REGNO (XEXP (x, 0)) >= 32)
6396	output_operand_lossage ("invalid %%P value");
6397      else
6398	fprintf (file, "%d", REGNO (XEXP (x, 0)));
6399      return;
6400
6401    case 'q':
6402      /* This outputs the logical code corresponding to a boolean
6403	 expression.  The expression may have one or both operands
6404	 negated (if one, only the first one).  For condition register
6405         logical operations, it will also treat the negated
6406         CR codes as NOTs, but not handle NOTs of them.  */
6407      {
6408	const char *const *t = 0;
6409	const char *s;
6410	enum rtx_code code = GET_CODE (x);
6411	static const char * const tbl[3][3] = {
6412	  { "and", "andc", "nor" },
6413	  { "or", "orc", "nand" },
6414	  { "xor", "eqv", "xor" } };
6415
6416	if (code == AND)
6417	  t = tbl[0];
6418	else if (code == IOR)
6419	  t = tbl[1];
6420	else if (code == XOR)
6421	  t = tbl[2];
6422	else
6423	  output_operand_lossage ("invalid %%q value");
6424
6425	if (GET_CODE (XEXP (x, 0)) != NOT)
6426	  s = t[0];
6427	else
6428	  {
6429	    if (GET_CODE (XEXP (x, 1)) == NOT)
6430	      s = t[2];
6431	    else
6432	      s = t[1];
6433	  }
6434
6435	fputs (s, file);
6436      }
6437      return;
6438
6439    case 'R':
6440      /* X is a CR register.  Print the mask for `mtcrf'.  */
6441      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6442	output_operand_lossage ("invalid %%R value");
6443      else
6444	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6445      return;
6446
6447    case 's':
6448      /* Low 5 bits of 32 - value */
6449      if (! INT_P (x))
6450	output_operand_lossage ("invalid %%s value");
6451      else
6452	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6453      return;
6454
6455    case 'S':
6456      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
6457	 CONST_INT 32-bit mask is considered sign-extended so any
6458	 transition must occur within the CONST_INT, not on the boundary.  */
6459      if (! mask64_operand (x, DImode))
6460	output_operand_lossage ("invalid %%S value");
6461
6462      val = INT_LOWPART (x);
6463
6464      if (val & 1)      /* Clear Left */
6465	{
6466	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6467	    if (!((val >>= 1) & 1))
6468	      break;
6469
6470#if HOST_BITS_PER_WIDE_INT == 32
6471	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6472	    {
6473	      val = CONST_DOUBLE_HIGH (x);
6474
6475	      if (val == 0)
6476		--i;
6477	      else
6478		for (i = 32; i < 64; i++)
6479		  if (!((val >>= 1) & 1))
6480		    break;
6481	    }
6482#endif
6483	/* i = index of last set bit from right
6484	   mask begins at 63 - i from left */
6485	  if (i > 63)
6486	    output_operand_lossage ("%%S computed all 1's mask");
6487
6488	  fprintf (file, "%d", 63 - i);
6489	  return;
6490	}
6491      else	/* Clear Right */
6492	{
6493	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6494	    if ((val >>= 1) & 1)
6495	      break;
6496
6497#if HOST_BITS_PER_WIDE_INT == 32
6498	if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6499	  {
6500	    val = CONST_DOUBLE_HIGH (x);
6501
6502	    if (val == (HOST_WIDE_INT) -1)
6503	      --i;
6504	    else
6505	      for (i = 32; i < 64; i++)
6506		if ((val >>= 1) & 1)
6507		  break;
6508	  }
6509#endif
6510	/* i = index of last clear bit from right
6511	   mask ends at 62 - i from left */
6512	  if (i > 62)
6513	    output_operand_lossage ("%%S computed all 0's mask");
6514
6515	  fprintf (file, "%d", 62 - i);
6516	  return;
6517	}
6518
6519    case 'T':
6520      /* Print the symbolic name of a branch target register.  */
6521      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6522				  && REGNO (x) != COUNT_REGISTER_REGNUM))
6523	output_operand_lossage ("invalid %%T value");
6524      else if (REGNO (x) == LINK_REGISTER_REGNUM)
6525	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6526      else
6527	fputs ("ctr", file);
6528      return;
6529
6530    case 'u':
6531      /* High-order 16 bits of constant for use in unsigned operand.  */
6532      if (! INT_P (x))
6533	output_operand_lossage ("invalid %%u value");
6534      else
6535	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6536		 (INT_LOWPART (x) >> 16) & 0xffff);
6537      return;
6538
6539    case 'v':
6540      /* High-order 16 bits of constant for use in signed operand.  */
6541      if (! INT_P (x))
6542	output_operand_lossage ("invalid %%v value");
6543      else
6544	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6545		 (INT_LOWPART (x) >> 16) & 0xffff);
6546      return;
6547
6548    case 'U':
6549      /* Print `u' if this has an auto-increment or auto-decrement.  */
6550      if (GET_CODE (x) == MEM
6551	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
6552	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6553	putc ('u', file);
6554      return;
6555
6556    case 'V':
6557      /* Print the trap code for this operand.  */
6558      switch (GET_CODE (x))
6559	{
6560	case EQ:
6561	  fputs ("eq", file);   /* 4 */
6562	  break;
6563	case NE:
6564	  fputs ("ne", file);   /* 24 */
6565	  break;
6566	case LT:
6567	  fputs ("lt", file);   /* 16 */
6568	  break;
6569	case LE:
6570	  fputs ("le", file);   /* 20 */
6571	  break;
6572	case GT:
6573	  fputs ("gt", file);   /* 8 */
6574	  break;
6575	case GE:
6576	  fputs ("ge", file);   /* 12 */
6577	  break;
6578	case LTU:
6579	  fputs ("llt", file);  /* 2 */
6580	  break;
6581	case LEU:
6582	  fputs ("lle", file);  /* 6 */
6583	  break;
6584	case GTU:
6585	  fputs ("lgt", file);  /* 1 */
6586	  break;
6587	case GEU:
6588	  fputs ("lge", file);  /* 5 */
6589	  break;
6590	default:
6591	  abort ();
6592	}
6593      break;
6594
6595    case 'w':
6596      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
6597	 normally.  */
6598      if (INT_P (x))
6599	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6600		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6601      else
6602	print_operand (file, x, 0);
6603      return;
6604
6605    case 'W':
6606      /* MB value for a PowerPC64 rldic operand.  */
6607      val = (GET_CODE (x) == CONST_INT
6608	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6609
6610      if (val < 0)
6611	i = -1;
6612      else
6613	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6614	  if ((val <<= 1) < 0)
6615	    break;
6616
6617#if HOST_BITS_PER_WIDE_INT == 32
6618      if (GET_CODE (x) == CONST_INT && i >= 0)
6619	i += 32;  /* zero-extend high-part was all 0's */
6620      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6621	{
6622	  val = CONST_DOUBLE_LOW (x);
6623
6624	  if (val == 0)
6625	    abort ();
6626	  else if (val < 0)
6627	    --i;
6628	  else
6629	    for ( ; i < 64; i++)
6630	      if ((val <<= 1) < 0)
6631		break;
6632	}
6633#endif
6634
6635      fprintf (file, "%d", i + 1);
6636      return;
6637
6638    case 'X':
6639      if (GET_CODE (x) == MEM
6640	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6641	putc ('x', file);
6642      return;
6643
6644    case 'Y':
6645      /* Like 'L', for third word of TImode  */
6646      if (GET_CODE (x) == REG)
6647	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6648      else if (GET_CODE (x) == MEM)
6649	{
6650	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6651	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6652	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6653	  else
6654	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6655	  if (small_data_operand (x, GET_MODE (x)))
6656	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6657		     reg_names[SMALL_DATA_REG]);
6658	}
6659      return;
6660
6661    case 'z':
6662      /* X is a SYMBOL_REF.  Write out the name preceded by a
6663	 period and without any trailing data in brackets.  Used for function
6664	 names.  If we are configured for System V (or the embedded ABI) on
6665	 the PowerPC, do not emit the period, since those systems do not use
6666	 TOCs and the like.  */
6667      if (GET_CODE (x) != SYMBOL_REF)
6668	abort ();
6669
6670      if (XSTR (x, 0)[0] != '.')
6671	{
6672	  switch (DEFAULT_ABI)
6673	    {
6674	    default:
6675	      abort ();
6676
6677	    case ABI_AIX:
6678	      putc ('.', file);
6679	      break;
6680
6681	    case ABI_V4:
6682	    case ABI_AIX_NODESC:
6683	    case ABI_DARWIN:
6684	      break;
6685	    }
6686	}
6687#if TARGET_AIX
6688      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6689#else
6690      assemble_name (file, XSTR (x, 0));
6691#endif
6692      return;
6693
6694    case 'Z':
6695      /* Like 'L', for last word of TImode.  */
6696      if (GET_CODE (x) == REG)
6697	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6698      else if (GET_CODE (x) == MEM)
6699	{
6700	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6701	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6702	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6703	  else
6704	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6705	  if (small_data_operand (x, GET_MODE (x)))
6706	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6707		     reg_names[SMALL_DATA_REG]);
6708	}
6709      return;
6710
6711      /* Print AltiVec memory operand.  */
6712    case 'y':
6713      {
6714	rtx tmp;
6715
6716	if (GET_CODE (x) != MEM)
6717	  abort ();
6718
6719	tmp = XEXP (x, 0);
6720
6721	if (GET_CODE (tmp) == REG)
6722	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6723	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6724	  {
6725	    if (REGNO (XEXP (tmp, 0)) == 0)
6726	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6727		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
6728	    else
6729	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6730		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
6731	  }
6732	else
6733	  abort ();
6734	break;
6735      }
6736
6737    case 0:
6738      if (GET_CODE (x) == REG)
6739	fprintf (file, "%s", reg_names[REGNO (x)]);
6740      else if (GET_CODE (x) == MEM)
6741	{
6742	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
6743	     know the width from the mode.  */
6744	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6745	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6746		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6747	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6748	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6749		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6750	  else
6751	    output_address (XEXP (x, 0));
6752	}
6753      else
6754	output_addr_const (file, x);
6755      return;
6756
6757    default:
6758      output_operand_lossage ("invalid %%xn code");
6759    }
6760}
6761
/* Print to FILE the assembly form of the memory address X.  X has
   already been validated as a legitimate address, so this only has to
   dispatch on the RTL shapes such an address can take.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    /* A bare register: offset 0 from that register.  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data reference: relocate against the small-data base
	   register.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* With a TOC, a bare symbolic address should never have been
	   accepted as legitimate.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form, "ra,rb".  If the first register is r0, print the
	 registers in the other order (r0 in a base-register slot reads
	 as the constant 0 in hardware).  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Register plus constant displacement: "d(ra)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF spelling of a LO_SUM: low 16 bits of a constant, offset
	 from a register holding the high part.  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin spelling of the same LO_SUM form.  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* Temporarily give the symbol an "@toc"-suffixed name;
		 the original name is restored after output below.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the MINUS -> SYM substitution made above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6851
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true when this function emitted the object itself,
   false to let the generic code handle it.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Nonzero while we are emitting a fixup entry; keeps a reentrant
	 call from generating another fixup for the same word.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word so the .fixup entry can point back at it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* Record the labelled word's address in the .fixup section,
	     then return to the previous section.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic behavior.  */
  return default_assemble_integer (x, size, aligned_p);
}
6915
6916enum rtx_code
6917rs6000_reverse_condition (mode, code)
6918     enum machine_mode mode;
6919     enum rtx_code code;
6920{
6921  /* Reversal of FP compares takes care -- an ordered compare
6922     becomes an unordered compare and vice versa.  */
6923  if (mode == CCFPmode)
6924    return reverse_condition_maybe_unordered (code);
6925  else
6926    return reverse_condition (code);
6927}
6928
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The values compared are the
   globals rs6000_compare_op0 and rs6000_compare_op1;
   rs6000_compare_fp_p says whether this is a floating-point compare.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the condition-register mode: CCFP for floating point, CCUNS
     for unsigned integer compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	  || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);
  emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			  gen_rtx_COMPARE (comp_mode,
					   rs6000_compare_op0,
					   rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose CODE into the OR of two primitive conditions.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* The caller now tests the combined result for equality.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
6993
6994
6995/* Emit the RTL for an sCOND pattern.  */
6996
6997void
6998rs6000_emit_sCOND (code, result)
6999     enum rtx_code code;
7000     rtx result;
7001{
7002  rtx condition_rtx;
7003  enum machine_mode op_mode;
7004
7005  condition_rtx = rs6000_generate_compare (code);
7006
7007  op_mode = GET_MODE (rs6000_compare_op0);
7008  if (op_mode == VOIDmode)
7009    op_mode = GET_MODE (rs6000_compare_op1);
7010
7011  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7012    {
7013      PUT_MODE (condition_rtx, DImode);
7014      convert_move (result, condition_rtx, 0);
7015    }
7016  else
7017    {
7018      PUT_MODE (condition_rtx, SImode);
7019      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7020    }
7021}
7022
7023/* Emit a branch of kind CODE to location LOC.  */
7024
7025void
7026rs6000_emit_cbranch (code, loc)
7027     enum rtx_code code;
7028     rtx loc;
7029{
7030  rtx condition_rtx, loc_ref;
7031
7032  condition_rtx = rs6000_generate_compare (code);
7033  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7034  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7035			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7036						     loc_ref, pc_rtx)));
7037}
7038
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is non-zero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned template lives in a static buffer, so it is only valid
   until the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length of 8 means the target is beyond conditional-branch
     range; we must branch around an unconditional branch, which also
     reverses the sense of the condition.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  /* Map the rtx comparison code to the assembler condition code.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* For branches that are very close to 50%, assume not-taken.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* Emit either a branch-to-register or a branch-to-label template;
     "{old|new}" braces pick between the two mnemonic sets.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
7143
7144/* Emit a conditional move: move TRUE_COND to DEST if OP of the
7145   operands of the last comparison is nonzero/true, FALSE_COND if it
7146   is zero/false.  Return 0 if the hardware has no such operation.  */
7147
7148int
7149rs6000_emit_cmove (dest, op, true_cond, false_cond)
7150     rtx dest;
7151     rtx op;
7152     rtx true_cond;
7153     rtx false_cond;
7154{
7155  enum rtx_code code = GET_CODE (op);
7156  rtx op0 = rs6000_compare_op0;
7157  rtx op1 = rs6000_compare_op1;
7158  REAL_VALUE_TYPE c1;
7159  enum machine_mode compare_mode = GET_MODE (op0);
7160  enum machine_mode result_mode = GET_MODE (dest);
7161  rtx temp;
7162
7163  /* These modes should always match. */
7164  if (GET_MODE (op1) != compare_mode)
7165    return 0;
7166  if (GET_MODE (true_cond) != result_mode)
7167    return 0;
7168  if (GET_MODE (false_cond) != result_mode)
7169    return 0;
7170
7171  /* First, work out if the hardware can do this at all, or
7172     if it's too slow...  */
7173  /* If the comparison is an integer one, since we only have fsel
7174     it'll be cheaper to use a branch.  */
7175  if (! rs6000_compare_fp_p)
7176    return 0;
7177
7178  /* Eliminate half of the comparisons by switching operands, this
7179     makes the remaining code simpler.  */
7180  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7181      || code == LTGT || code == LT)
7182    {
7183      code = reverse_condition_maybe_unordered (code);
7184      temp = true_cond;
7185      true_cond = false_cond;
7186      false_cond = temp;
7187    }
7188
7189  /* UNEQ and LTGT take four instructions for a comparison with zero,
7190     it'll probably be faster to use a branch here too.  */
7191  if (code == UNEQ)
7192    return 0;
7193
7194  if (GET_CODE (op1) == CONST_DOUBLE)
7195    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7196
7197  /* We're going to try to implement comparions by performing
7198     a subtract, then comparing against zero.  Unfortunately,
7199     Inf - Inf is NaN which is not zero, and so if we don't
7200     know that the the operand is finite and the comparison
7201     would treat EQ different to UNORDERED, we can't do it.  */
7202  if (! flag_unsafe_math_optimizations
7203      && code != GT && code != UNGE
7204      && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7205      /* Constructs of the form (a OP b ? a : b) are safe.  */
7206      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7207	  || (! rtx_equal_p (op0, true_cond)
7208	      && ! rtx_equal_p (op1, true_cond))))
7209    return 0;
7210  /* At this point we know we can use fsel.  */
7211
7212  /* Reduce the comparison to a comparison against zero.  */
7213  temp = gen_reg_rtx (compare_mode);
7214  emit_insn (gen_rtx_SET (VOIDmode, temp,
7215			  gen_rtx_MINUS (compare_mode, op0, op1)));
7216  op0 = temp;
7217  op1 = CONST0_RTX (compare_mode);
7218
7219  /* If we don't care about NaNs we can reduce some of the comparisons
7220     down to faster ones.  */
7221  if (flag_unsafe_math_optimizations)
7222    switch (code)
7223      {
7224      case GT:
7225	code = LE;
7226	temp = true_cond;
7227	true_cond = false_cond;
7228	false_cond = temp;
7229	break;
7230      case UNGE:
7231	code = GE;
7232	break;
7233      case UNEQ:
7234	code = EQ;
7235	break;
7236      default:
7237	break;
7238      }
7239
7240  /* Now, reduce everything down to a GE.  */
7241  switch (code)
7242    {
7243    case GE:
7244      break;
7245
7246    case LE:
7247      temp = gen_reg_rtx (compare_mode);
7248      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7249      op0 = temp;
7250      break;
7251
7252    case ORDERED:
7253      temp = gen_reg_rtx (compare_mode);
7254      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7255      op0 = temp;
7256      break;
7257
7258    case EQ:
7259      temp = gen_reg_rtx (compare_mode);
7260      emit_insn (gen_rtx_SET (VOIDmode, temp,
7261			      gen_rtx_NEG (compare_mode,
7262					   gen_rtx_ABS (compare_mode, op0))));
7263      op0 = temp;
7264      break;
7265
7266    case UNGE:
7267      temp = gen_reg_rtx (result_mode);
7268      emit_insn (gen_rtx_SET (VOIDmode, temp,
7269			      gen_rtx_IF_THEN_ELSE (result_mode,
7270						    gen_rtx_GE (VOIDmode,
7271								op0, op1),
7272						    true_cond, false_cond)));
7273      false_cond = temp;
7274      true_cond = false_cond;
7275
7276      temp = gen_reg_rtx (compare_mode);
7277      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7278      op0 = temp;
7279      break;
7280
7281    case GT:
7282      temp = gen_reg_rtx (result_mode);
7283      emit_insn (gen_rtx_SET (VOIDmode, temp,
7284			      gen_rtx_IF_THEN_ELSE (result_mode,
7285						    gen_rtx_GE (VOIDmode,
7286								op0, op1),
7287						    true_cond, false_cond)));
7288      true_cond = temp;
7289      false_cond = true_cond;
7290
7291      temp = gen_reg_rtx (compare_mode);
7292      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7293      op0 = temp;
7294      break;
7295
7296    default:
7297      abort ();
7298    }
7299
7300  emit_insn (gen_rtx_SET (VOIDmode, dest,
7301			  gen_rtx_IF_THEN_ELSE (result_mode,
7302						gen_rtx_GE (VOIDmode,
7303							    op0, op1),
7304						true_cond, false_cond)));
7305  return 1;
7306}
7307
7308void
7309rs6000_emit_minmax (dest, code, op0, op1)
7310     rtx dest;
7311     enum rtx_code code;
7312     rtx op0;
7313     rtx op1;
7314{
7315  enum machine_mode mode = GET_MODE (op0);
7316  rtx target;
7317  if (code == SMAX || code == UMAX)
7318    target = emit_conditional_move (dest, GE, op0, op1, mode,
7319				    op0, op1, mode, 0);
7320  else
7321    target = emit_conditional_move (dest, GE, op0, op1, mode,
7322				    op1, op0, mode, 0);
7323  if (target == NULL_RTX)
7324    abort ();
7325  if (target != dest)
7326    emit_move_insn (dest, target);
7327}
7328
7329/* This page contains routines that are used to determine what the
7330   function prologue and epilogue code will do and write them out.  */
7331
7332/* Return the first fixed-point register that is required to be
7333   saved. 32 if none.  */
7334
7335int
7336first_reg_to_save ()
7337{
7338  int first_reg;
7339
7340  /* Find lowest numbered live register.  */
7341  for (first_reg = 13; first_reg <= 31; first_reg++)
7342    if (regs_ever_live[first_reg]
7343	&& (! call_used_regs[first_reg]
7344	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7345		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7346		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7347      break;
7348
7349#if TARGET_MACHO
7350  if (flag_pic && current_function_uses_pic_offset_table &&
7351      (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7352    return RS6000_PIC_OFFSET_TABLE_REGNUM;
7353#endif
7354
7355  return first_reg;
7356}
7357
7358/* Similar, for FP regs.  */
7359
7360int
7361first_fp_reg_to_save ()
7362{
7363  int first_reg;
7364
7365  /* Find lowest numbered live register.  */
7366  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7367    if (regs_ever_live[first_reg])
7368      break;
7369
7370  return first_reg;
7371}
7372
7373/* Similar, for AltiVec regs.  */
7374
7375static int
7376first_altivec_reg_to_save ()
7377{
7378  int i;
7379
7380  /* Stack frame remains as is unless we are in AltiVec ABI.  */
7381  if (! TARGET_ALTIVEC_ABI)
7382    return LAST_ALTIVEC_REGNO + 1;
7383
7384  /* Find lowest numbered live register.  */
7385  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7386    if (regs_ever_live[i])
7387      break;
7388
7389  return i;
7390}
7391
7392/* Return a 32-bit mask of the AltiVec registers we need to set in
7393   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
7394   the 32-bit word is 0.  */
7395
7396static unsigned int
7397compute_vrsave_mask ()
7398{
7399  unsigned int i, mask = 0;
7400
7401  /* First, find out if we use _any_ altivec registers.  */
7402  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7403    if (regs_ever_live[i])
7404      mask |= ALTIVEC_REG_BIT (i);
7405
7406  if (mask == 0)
7407    return mask;
7408
7409  /* Next, add all registers that are call-clobbered.  We do this
7410     because post-reload register optimizers such as regrename_optimize
7411     may choose to use them.  They never change the register class
7412     chosen by reload, so cannot create new uses of altivec registers
7413     if there were none before, so the early exit above is safe.  */
7414  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7415     altivec registers not saved in the mask, which might well make the
7416     adjustments below more effective in eliding the save/restore of
7417     VRSAVE in small functions.  */
7418  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7419    if (call_used_regs[i])
7420      mask |= ALTIVEC_REG_BIT (i);
7421
7422  /* Next, remove the argument registers from the set.  These must
7423     be in the VRSAVE mask set by the caller, so we don't need to add
7424     them in again.  More importantly, the mask we compute here is
7425     used to generate CLOBBERs in the set_vrsave insn, and we do not
7426     wish the argument registers to die.  */
7427  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7428    mask &= ~ALTIVEC_REG_BIT (i);
7429
7430  /* Similarly, remove the return value from the set.  */
7431  {
7432    bool yes = false;
7433    diddle_return_value (is_altivec_return_reg, &yes);
7434    if (yes)
7435      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7436  }
7437
7438  return mask;
7439}
7440
7441static void
7442is_altivec_return_reg (reg, xyes)
7443     rtx reg;
7444     void *xyes;
7445{
7446  bool *yes = (bool *) xyes;
7447  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7448    *yes = true;
7449}
7450
7451
7452/* Calculate the stack information for the current function.  This is
7453   complicated by having two separate calling sequences, the AIX calling
7454   sequence and the V.4 calling sequence.
7455
7456   AIX (and Darwin/Mac OS X) stack frames look like:
7457							  32-bit  64-bit
7458	SP---->	+---------------------------------------+
7459		| back chain to caller			| 0	  0
7460		+---------------------------------------+
7461		| saved CR				| 4       8 (8-11)
7462		+---------------------------------------+
7463		| saved LR				| 8       16
7464		+---------------------------------------+
7465		| reserved for compilers		| 12      24
7466		+---------------------------------------+
7467		| reserved for binders			| 16      32
7468		+---------------------------------------+
7469		| saved TOC pointer			| 20      40
7470		+---------------------------------------+
7471		| Parameter save area (P)		| 24      48
7472		+---------------------------------------+
7473		| Alloca space (A)			| 24+P    etc.
7474		+---------------------------------------+
7475		| Local variable space (L)		| 24+P+A
7476		+---------------------------------------+
7477		| Float/int conversion temporary (X)	| 24+P+A+L
7478		+---------------------------------------+
7479		| Save area for AltiVec registers (W)	| 24+P+A+L+X
7480		+---------------------------------------+
7481		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
7482		+---------------------------------------+
7483		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
7484		+---------------------------------------+
7485		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
7486		+---------------------------------------+
7487		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
7488		+---------------------------------------+
7489	old SP->| back chain to caller's caller		|
7490		+---------------------------------------+
7491
7492   The required alignment for AIX configurations is two words (i.e., 8
7493   or 16 bytes).
7494
7495
7496   V.4 stack frames look like:
7497
7498	SP---->	+---------------------------------------+
7499		| back chain to caller			| 0
7500		+---------------------------------------+
7501		| caller's saved LR			| 4
7502		+---------------------------------------+
7503		| Parameter save area (P)		| 8
7504		+---------------------------------------+
7505		| Alloca space (A)			| 8+P
7506		+---------------------------------------+
7507		| Varargs save area (V)			| 8+P+A
7508		+---------------------------------------+
7509		| Local variable space (L)		| 8+P+A+V
7510		+---------------------------------------+
7511		| Float/int conversion temporary (X)	| 8+P+A+V+L
7512		+---------------------------------------+
7513		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
7514		+---------------------------------------+
7515		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
7516		+---------------------------------------+
7517		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
7518		+---------------------------------------+
7519		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
7520		+---------------------------------------+
7521		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
7522		+---------------------------------------+
7523		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
7524		+---------------------------------------+
7525	old SP->| back chain to caller's caller		|
7526		+---------------------------------------+
7527
7528   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7529   given.  (But note below and in sysv4.h that we require only 8 and
7530   may round up the size of our stack frame anyways.  The historical
7531   reason is early versions of powerpc-linux which didn't properly
7532   align the stack at program startup.  A happy side-effect is that
7533   -mno-eabi libraries can be used with -meabi programs.)
7534
7535   The EABI configuration defaults to the V.4 layout, unless
7536   -mcall-aix is used, in which case the AIX layout is used.  However,
7537   the stack alignment requirements may differ.  If -mno-eabi is not
7538   given, the required stack alignment is 8 bytes; if -mno-eabi is
7539   given, the required alignment is 16 bytes.  (But see V.4 comment
7540   above.)  */
7541
7542#ifndef ABI_STACK_BOUNDARY
7543#define ABI_STACK_BOUNDARY STACK_BOUNDARY
7544#endif
7545
7546rs6000_stack_t *
7547rs6000_stack_info ()
7548{
7549  static rs6000_stack_t info, zero_info;
7550  rs6000_stack_t *info_ptr = &info;
7551  int reg_size = TARGET_POWERPC64 ? 8 : 4;
7552  enum rs6000_abi abi;
7553  int ehrd_size;
7554  int total_raw_size;
7555
7556  /* Zero all fields portably.  */
7557  info = zero_info;
7558
7559  /* Select which calling sequence.  */
7560  info_ptr->abi = abi = DEFAULT_ABI;
7561
7562  /* Calculate which registers need to be saved & save area size.  */
7563  info_ptr->first_gp_reg_save = first_reg_to_save ();
7564  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7565     even if it currently looks like we won't.  */
7566  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7567       || (flag_pic == 1 && abi == ABI_V4)
7568       || (flag_pic && abi == ABI_DARWIN))
7569      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7570    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7571  else
7572    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7573
7574  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7575  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7576
7577  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7578  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7579				 - info_ptr->first_altivec_reg_save);
7580
7581  /* Does this function call anything?  */
7582  info_ptr->calls_p = (! current_function_is_leaf
7583		       || cfun->machine->ra_needs_full_frame);
7584
7585  /* Determine if we need to save the link register.  */
7586  if (rs6000_ra_ever_killed ()
7587      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7588#ifdef TARGET_RELOCATABLE
7589      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7590#endif
7591      || (info_ptr->first_fp_reg_save != 64
7592	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7593      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7594      || (abi == ABI_V4 && current_function_calls_alloca)
7595      || (DEFAULT_ABI == ABI_DARWIN
7596	  && flag_pic
7597	  && current_function_uses_pic_offset_table)
7598      || info_ptr->calls_p)
7599    {
7600      info_ptr->lr_save_p = 1;
7601      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7602    }
7603
7604  /* Determine if we need to save the condition code registers.  */
7605  if (regs_ever_live[CR2_REGNO]
7606      || regs_ever_live[CR3_REGNO]
7607      || regs_ever_live[CR4_REGNO])
7608    {
7609      info_ptr->cr_save_p = 1;
7610      if (abi == ABI_V4)
7611	info_ptr->cr_size = reg_size;
7612    }
7613
7614  /* If the current function calls __builtin_eh_return, then we need
7615     to allocate stack space for registers that will hold data for
7616     the exception handler.  */
7617  if (current_function_calls_eh_return)
7618    {
7619      unsigned int i;
7620      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7621	continue;
7622      ehrd_size = i * UNITS_PER_WORD;
7623    }
7624  else
7625    ehrd_size = 0;
7626
7627  /* Determine various sizes.  */
7628  info_ptr->reg_size     = reg_size;
7629  info_ptr->fixed_size   = RS6000_SAVE_AREA;
7630  info_ptr->varargs_size = RS6000_VARARGS_AREA;
7631  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
7632  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
7633					 8);
7634
7635  if (TARGET_ALTIVEC_ABI)
7636    {
7637      info_ptr->vrsave_mask = compute_vrsave_mask ();
7638      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
7639    }
7640  else
7641    {
7642      info_ptr->vrsave_mask = 0;
7643      info_ptr->vrsave_size = 0;
7644    }
7645
7646  /* Calculate the offsets.  */
7647  switch (abi)
7648    {
7649    case ABI_NONE:
7650    default:
7651      abort ();
7652
7653    case ABI_AIX:
7654    case ABI_AIX_NODESC:
7655    case ABI_DARWIN:
7656      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7657      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7658
7659      if (TARGET_ALTIVEC_ABI)
7660	{
7661	  info_ptr->vrsave_save_offset
7662	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7663
7664	  /* Align stack so vector save area is on a quadword boundary.  */
7665	  if (info_ptr->altivec_size != 0)
7666	    info_ptr->altivec_padding_size
7667	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7668	  else
7669	    info_ptr->altivec_padding_size = 0;
7670
7671	  info_ptr->altivec_save_offset
7672	    = info_ptr->vrsave_save_offset
7673	    - info_ptr->altivec_padding_size
7674	    - info_ptr->altivec_size;
7675
7676	  /* Adjust for AltiVec case.  */
7677	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7678	}
7679      else
7680	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
7681      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
7682      info_ptr->lr_save_offset   = 2*reg_size;
7683      break;
7684
7685    case ABI_V4:
7686      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7687      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7688      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
7689
7690      if (TARGET_ALTIVEC_ABI)
7691	{
7692	  info_ptr->vrsave_save_offset
7693	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7694
7695	  /* Align stack so vector save area is on a quadword boundary.  */
7696	  if (info_ptr->altivec_size != 0)
7697	    info_ptr->altivec_padding_size
7698	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7699	  else
7700	    info_ptr->altivec_padding_size = 0;
7701
7702	  info_ptr->altivec_save_offset
7703	    = info_ptr->vrsave_save_offset
7704	    - info_ptr->altivec_padding_size
7705	    - info_ptr->altivec_size;
7706
7707	  /* Adjust for AltiVec case.  */
7708	  info_ptr->toc_save_offset
7709	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
7710	}
7711      else
7712	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
7713      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
7714      info_ptr->lr_save_offset   = reg_size;
7715      break;
7716    }
7717
7718  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
7719					 + info_ptr->gp_size
7720					 + info_ptr->altivec_size
7721					 + info_ptr->altivec_padding_size
7722					 + info_ptr->vrsave_size
7723					 + ehrd_size
7724					 + info_ptr->cr_size
7725					 + info_ptr->lr_size
7726					 + info_ptr->vrsave_size
7727					 + info_ptr->toc_size,
7728					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7729					 ? 16 : 8);
7730
7731  total_raw_size	 = (info_ptr->vars_size
7732			    + info_ptr->parm_size
7733			    + info_ptr->save_size
7734			    + info_ptr->varargs_size
7735			    + info_ptr->fixed_size);
7736
7737  info_ptr->total_size =
7738    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7739
7740  /* Determine if we need to allocate any stack frame:
7741
7742     For AIX we need to push the stack if a frame pointer is needed
7743     (because the stack might be dynamically adjusted), if we are
7744     debugging, if we make calls, or if the sum of fp_save, gp_save,
7745     and local variables are more than the space needed to save all
7746     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7747     + 18*8 = 288 (GPR13 reserved).
7748
7749     For V.4 we don't have the stack cushion that AIX uses, but assume
7750     that the debugger can handle stackless frames.  */
7751
7752  if (info_ptr->calls_p)
7753    info_ptr->push_p = 1;
7754
7755  else if (abi == ABI_V4)
7756    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7757
7758  else
7759    info_ptr->push_p = (frame_pointer_needed
7760			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7761			|| ((total_raw_size - info_ptr->fixed_size)
7762			    > (TARGET_32BIT ? 220 : 288)));
7763
7764  /* Zero offsets if we're not saving those registers.  */
7765  if (info_ptr->fp_size == 0)
7766    info_ptr->fp_save_offset = 0;
7767
7768  if (info_ptr->gp_size == 0)
7769    info_ptr->gp_save_offset = 0;
7770
7771  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7772    info_ptr->altivec_save_offset = 0;
7773
7774  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7775    info_ptr->vrsave_save_offset = 0;
7776
7777  if (! info_ptr->lr_save_p)
7778    info_ptr->lr_save_offset = 0;
7779
7780  if (! info_ptr->cr_save_p)
7781    info_ptr->cr_save_offset = 0;
7782
7783  if (! info_ptr->toc_save_p)
7784    info_ptr->toc_save_offset = 0;
7785
7786  return info_ptr;
7787}
7788
/* Print a human-readable dump of INFO to stderr; if INFO is null, dump
   freshly computed stack information for the current function.  Fields
   holding their "nothing saved" defaults (zero offsets/sizes, 32 for
   first_gp_reg_save, 64 for first_fp_reg_save) are omitted.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Map the ABI enum to a display name; AIX and AIX_NODESC print alike.  */
  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Offsets, then sizes, in the same order as rs6000_stack_info
     computes them.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
7920
7921rtx
7922rs6000_return_addr (count, frame)
7923     int count;
7924     rtx frame;
7925{
7926  /* Currently we don't optimize very well between prolog and body
7927     code and for PIC code the code can be actually quite bad, so
7928     don't try to be too clever here.  */
7929  if (count != 0
7930      || flag_pic != 0
7931      || DEFAULT_ABI == ABI_AIX
7932      || DEFAULT_ABI == ABI_AIX_NODESC)
7933    {
7934      cfun->machine->ra_needs_full_frame = 1;
7935
7936      return
7937	gen_rtx_MEM
7938	  (Pmode,
7939	   memory_address
7940	   (Pmode,
7941	    plus_constant (copy_to_reg
7942			   (gen_rtx_MEM (Pmode,
7943					 memory_address (Pmode, frame))),
7944			   RETURN_ADDRESS_OFFSET)));
7945    }
7946
7947  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7948}
7949
7950static int
7951rs6000_ra_ever_killed ()
7952{
7953  rtx top;
7954
7955#ifdef ASM_OUTPUT_MI_THUNK
7956  if (current_function_is_thunk)
7957    return 0;
7958#endif
7959  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7960      || cfun->machine->ra_needs_full_frame)
7961    return regs_ever_live[LINK_REGISTER_REGNUM];
7962
7963  push_topmost_sequence ();
7964  top = get_insns ();
7965  pop_topmost_sequence ();
7966
7967  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7968			    top, NULL_RTX);
7969}
7970
7971/* Add a REG_MAYBE_DEAD note to the insn.  */
7972static void
7973rs6000_maybe_dead (insn)
7974     rtx insn;
7975{
7976  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7977					const0_rtx,
7978					REG_NOTES (insn));
7979}
7980
7981/* Emit instructions needed to load the TOC register.
7982   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7983   a constant pool; or for SVR4 -fpic.  */
7984
/* Emit the insns that initialize the TOC/GOT base register.  FROMPROLOG
   is nonzero when called while emitting the prologue, in which case hard
   registers (LR, r0) are used as temporaries instead of new pseudos, and
   the prologue's own internal labels (LCF/LCL) are referenced.  Each
   emitted insn is tagged REG_MAYBE_DEAD via rs6000_maybe_dead.  */
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC: a single load_toc_v4_pic_si insn.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* SVR4 -fPIC (large model): compute the GOT base from a pc-relative
     label pair.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  /* In the prologue, reference the LCF/LCL labels that the
	     prologue text itself defines for this function.  */
	  rtx symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Outside the prologue, generate a fresh LCG label per call
	     (reload_toc_labelno is a function-local counter).  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX proper: reload the TOC pointer from its dedicated slot.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
8075
/* Return the alias set used for TOC references, creating it lazily on
   first use.  */
int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
8084
8085/* This retuns nonzero if the current function uses the TOC.  This is
8086   determined by the presence of (unspec ... 7), which is generated by
8087   the various load_toc_* patterns.  */
8088
8089int
8090uses_TOC ()
8091{
8092    rtx insn;
8093
8094    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8095      if (INSN_P (insn))
8096	{
8097	  rtx pat = PATTERN (insn);
8098	  int i;
8099
8100	  if (GET_CODE (pat) == PARALLEL)
8101	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8102	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8103		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8104		  return 1;
8105	}
8106    return 0;
8107}
8108
8109rtx
8110create_TOC_reference (symbol)
8111    rtx symbol;
8112{
8113  return gen_rtx_PLUS (Pmode,
8114	   gen_rtx_REG (Pmode, TOC_REGISTER),
8115	     gen_rtx_CONST (Pmode,
8116	       gen_rtx_MINUS (Pmode, symbol,
8117		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8118}
8119
8120#if TARGET_AIX
8121/* __throw will restore its own return address to be the same as the
8122   return address of the function that the throw is being made to.
8123   This is unfortunate, because we want to check the original
8124   return address to see if we need to restore the TOC.
8125   So we have to squirrel it away here.
8126   This is used only in compiling __throw and __rethrow.
8127
8128   Most of this code should be removed by CSE.  */
8129static rtx insn_after_throw;
8130
8131/* This does the saving...  */
8132void
8133rs6000_aix_emit_builtin_unwind_init ()
8134{
8135  rtx mem;
8136  rtx stack_top = gen_reg_rtx (Pmode);
8137  rtx opcode_addr = gen_reg_rtx (Pmode);
8138
8139  insn_after_throw = gen_reg_rtx (SImode);
8140
8141  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8142  emit_move_insn (stack_top, mem);
8143
8144  mem = gen_rtx_MEM (Pmode,
8145		     gen_rtx_PLUS (Pmode, stack_top,
8146				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8147  emit_move_insn (opcode_addr, mem);
8148  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8149}
8150
8151/* Emit insns to _restore_ the TOC register, at runtime (specifically
8152   in _eh.o).  Only used on AIX.
8153
8154   The idea is that on AIX, function calls look like this:
8155	bl  somefunction-trampoline
8156	lwz r2,20(sp)
8157
8158   and later,
8159	somefunction-trampoline:
8160	stw r2,20(sp)
8161	 ... load function address in the count register ...
8162	bctr
8163   or like this, if the linker determines that this is not a cross-module call
8164   and so the TOC need not be restored:
8165	bl  somefunction
8166	nop
8167   or like this, if the compiler could determine that this is not a
8168   cross-module call:
8169	bl  somefunction
8170   now, the tricky bit here is that register 2 is saved and restored
8171   by the _linker_, so we can't readily generate debugging information
8172   for it.  So we need to go back up the call chain looking at the
8173   insns at return addresses to see which calls saved the TOC register
8174   and so see where it gets restored from.
8175
8176   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8177   just before the actual epilogue.
8178
8179   On the bright side, this incurs no space or time overhead unless an
8180   exception is thrown, except for the extra code in libgcc.a.
8181
8182   The parameter STACKSIZE is a register containing (at runtime)
8183   the amount to be popped off the stack in addition to the stack frame
8184   of this routine (which will be __throw or __rethrow, and so is
8185   guaranteed to have a stack frame).  */
8186
/* Walk back up the call chain (see the long comment above) restoring
   r2 (the TOC pointer) wherever a frame's return address points at a
   TOC-restore opcode.  STACKSIZE is a register holding the additional
   amount to be popped beyond this routine's own frame; the walk stops
   when the back chain reaches bottom_of_stack + STACKSIZE.  */
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start at our own frame's back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The TOC-restore opcode to look for: lwz r2,20(r1) on 32-bit,
     ld r2,40(r1) on 64-bit (see the call-sequence comment above).  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If this frame's return site is followed by the TOC-restore opcode,
     reload r2 from the TOC save slot (5 words up from the back chain).  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk reaches the target frame.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance to the next frame via its back chain ...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the opcode at that frame's return address (saved LR,
     2 words up from the back chain).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8250#endif /* TARGET_AIX */
8251
8252/* This ties together stack memory (MEM with an alias set of
8253   rs6000_sr_alias_set) and the change to the stack pointer.  */
8254
8255static void
8256rs6000_emit_stack_tie ()
8257{
8258  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8259
8260  set_mem_alias_set (mem, rs6000_sr_alias_set);
8261  emit_insn (gen_stack_tie (mem));
8262}
8263
8264/* Emit the correct code for allocating stack space, as insns.
8265   If COPY_R12, make sure a copy of the old frame is left in r12.
8266   The generated code may use hard register 0 as a temporary.  */
8267
/* Emit insns that decrement the stack pointer by SIZE bytes, storing
   the back chain at the new stack top.  If COPY_R12 is nonzero, leave
   a copy of the old stack pointer in r12.  Hard register 0 is used as
   a temporary.  The final insn is marked frame-related with a
   REG_FRAME_RELATED_EXPR note describing the SP adjustment.  */
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before the allocation if the new stack
     pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      /* Limit held in a (non-r0/r1) register: compute limit+size into
	 r0 and trap if sp < r0.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)))
;
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Limit given as a symbol (32-bit SVR4 only): materialize
	 symbol+size with lis/ori and trap likewise.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Save the old stack pointer in r12: requested by the caller, or
     needed below to store the back chain when not using store-update.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* stwu/stdu: one insn both decrements sp and stores the back
	 chain.  A displacement > 16 bits must first be loaded into a
	 register and split.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No store-update: decrement sp, then store the old sp (in r12)
	 as the back chain at the new stack top.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the sp adjustment for DWARF frame debug info.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8353
8354/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8355   knows that:
8356
8357     (mem (plus (blah) (regXX)))
8358
8359   is really:
8360
8361     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
8362
8363static void
8364altivec_frame_fixup (insn, reg, val)
8365     rtx insn, reg;
8366     HOST_WIDE_INT val;
8367{
8368  rtx real;
8369
8370  real = copy_rtx (PATTERN (insn));
8371
8372  real = replace_rtx (real, reg, GEN_INT (val));
8373
8374  RTX_FRAME_RELATED_P (insn) = 1;
8375  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8376					real,
8377					REG_NOTES (insn));
8378}
8379
8380/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8381   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8382   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
8383   deduce these equivalences by itself so it wasn't necessary to hold
8384   its hand so much.  */
8385
8386static void
8387rs6000_frame_related (insn, reg, val, reg2, rreg)
8388     rtx insn;
8389     rtx reg;
8390     HOST_WIDE_INT val;
8391     rtx reg2;
8392     rtx rreg;
8393{
8394  rtx real, temp;
8395
8396  real = copy_rtx (PATTERN (insn));
8397
8398  real = replace_rtx (real, reg,
8399		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8400							STACK_POINTER_REGNUM),
8401				    GEN_INT (val)));
8402
8403  /* We expect that 'real' is either a SET or a PARALLEL containing
8404     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
8405     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
8406
8407  if (GET_CODE (real) == SET)
8408    {
8409      rtx set = real;
8410
8411      temp = simplify_rtx (SET_SRC (set));
8412      if (temp)
8413	SET_SRC (set) = temp;
8414      temp = simplify_rtx (SET_DEST (set));
8415      if (temp)
8416	SET_DEST (set) = temp;
8417      if (GET_CODE (SET_DEST (set)) == MEM)
8418	{
8419	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8420	  if (temp)
8421	    XEXP (SET_DEST (set), 0) = temp;
8422	}
8423    }
8424  else if (GET_CODE (real) == PARALLEL)
8425    {
8426      int i;
8427      for (i = 0; i < XVECLEN (real, 0); i++)
8428	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8429	  {
8430	    rtx set = XVECEXP (real, 0, i);
8431
8432	    temp = simplify_rtx (SET_SRC (set));
8433	    if (temp)
8434	      SET_SRC (set) = temp;
8435	    temp = simplify_rtx (SET_DEST (set));
8436	    if (temp)
8437	      SET_DEST (set) = temp;
8438	    if (GET_CODE (SET_DEST (set)) == MEM)
8439	      {
8440		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8441		if (temp)
8442		  XEXP (SET_DEST (set), 0) = temp;
8443	      }
8444	    RTX_FRAME_RELATED_P (set) = 1;
8445	  }
8446    }
8447  else
8448    abort ();
8449
8450  if (reg2 != NULL_RTX)
8451    real = replace_rtx (real, reg2, rreg);
8452
8453  RTX_FRAME_RELATED_P (insn) = 1;
8454  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8455					real,
8456					REG_NOTES (insn));
8457}
8458
8459/* Returns an insn that has a vrsave set operation with the
8460   appropriate CLOBBERs.  */
8461
8462static rtx
8463generate_set_vrsave (reg, info, epiloguep)
8464     rtx reg;
8465     rs6000_stack_t *info;
8466     int epiloguep;
8467{
8468  int nclobs, i;
8469  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8470  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8471
8472  clobs[0]
8473    = gen_rtx_SET (VOIDmode,
8474		   vrsave,
8475		   gen_rtx_UNSPEC_VOLATILE (SImode,
8476					    gen_rtvec (2, reg, vrsave),
8477					    30));
8478
8479  nclobs = 1;
8480
8481  /* We need to clobber the registers in the mask so the scheduler
8482     does not move sets to VRSAVE before sets of AltiVec registers.
8483
8484     However, if the function receives nonlocal gotos, reload will set
8485     all call saved registers live.  We will end up with:
8486
8487     	(set (reg 999) (mem))
8488	(parallel [ (set (reg vrsave) (unspec blah))
8489		    (clobber (reg 999))])
8490
8491     The clobber will cause the store into reg 999 to be dead, and
8492     flow will attempt to delete an epilogue insn.  In this case, we
8493     need an unspec use/set of the register.  */
8494
8495  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8496    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8497      {
8498	if (!epiloguep || call_used_regs [i])
8499	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8500					     gen_rtx_REG (V4SImode, i));
8501	else
8502	  {
8503	    rtx reg = gen_rtx_REG (V4SImode, i);
8504
8505	    clobs[nclobs++]
8506	      = gen_rtx_SET (VOIDmode,
8507			     reg,
8508			     gen_rtx_UNSPEC (V4SImode,
8509					     gen_rtvec (1, reg), 27));
8510	  }
8511      }
8512
8513  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8514
8515  for (i = 0; i < nclobs; ++i)
8516    XVECEXP (insn, 0, i) = clobs[i];
8517
8518  return insn;
8519}
8520
8521/* Emit function prologue as insns.  */
8522
void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* Mode and byte width of an integer register save slot.  */
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  /* r12: used both as the frame-offset register for dwarf notes and,
     for large V.4 frames, as a copy of the incoming stack pointer.  */
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  /* Register the saves below are addressed from; the stack pointer
     unless the V.4 path switches it to r12.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  /* Offset added to save addresses when the stack has already been
     decremented (V.4 small-frame case); otherwise zero.  */
  HOST_WIDE_INT sp_offset = 0;

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames: saves reach their slots from the new sp via
	 SP_OFFSET.  Large frames: keep the old sp in r12 and address
	 the saves from there.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      /* Keep the scheduler from moving saves above the sp update.  */
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the byte offset, since AltiVec stores have no
	       reg+constant addressing mode.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* Note for the unwinder that AREG is really the constant
	       OFFSET here.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->fp_save_offset
					  + sp_offset
					  + 8 * i));
	    mem = gen_rtx_MEM (DFmode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: a PARALLEL that clobbers LR and uses the
	 _savefN routine, with one SET per saved FP register so the
	 unwind info is still correct.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p, dwarfp;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      /* NOTE(review): dwarfp is allocated but never filled or used
	 here -- apparently dead.  */
      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    /* The PIC register must also be saved when it is live for
	       PIC code under V.4 or Darwin.  */
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->gp_save_offset
					  + sp_offset
					  + reg_size * i));
	    mem = gen_rtx_MEM (reg_mode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, reg, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  reg = gen_rtx_REG (reg_mode, regno);
	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      /* r0 was loaded with LR above.  */
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R11 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, 11),
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

    rs6000_emit_load_toc_table (TRUE);

    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		      gen_rtx_REG (Pmode, 11));
  }

  /* Darwin PIC: materialize the picbase in LR and copy it to the PIC
     offset table register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
8865
8866/* Write function prologue.  */
8867
8868static void
8869rs6000_output_function_prologue (file, size)
8870     FILE *file;
8871     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8872{
8873  rs6000_stack_t *info = rs6000_stack_info ();
8874
8875  if (TARGET_DEBUG_STACK)
8876    debug_stack_info (info);
8877
8878  /* Write .extern for any function we will call to save and restore
8879     fp values.  */
8880  if (info->first_fp_reg_save < 64
8881      && !FP_SAVE_INLINE (info->first_fp_reg_save))
8882    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8883	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8884	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8885	     RESTORE_FP_SUFFIX);
8886
8887  /* Write .extern for AIX common mode routines, if needed.  */
8888  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8889    {
8890      fputs ("\t.extern __mulh\n", file);
8891      fputs ("\t.extern __mull\n", file);
8892      fputs ("\t.extern __divss\n", file);
8893      fputs ("\t.extern __divus\n", file);
8894      fputs ("\t.extern __quoss\n", file);
8895      fputs ("\t.extern __quous\n", file);
8896      common_mode_defined = 1;
8897    }
8898
8899  if (! HAVE_prologue)
8900    {
8901      start_sequence ();
8902
8903      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8904	 the "toplevel" insn chain.  */
8905      emit_note (0, NOTE_INSN_DELETED);
8906      rs6000_emit_prologue ();
8907      emit_note (0, NOTE_INSN_DELETED);
8908
8909      /* Expand INSN_ADDRESSES so final() doesn't crash. */
8910      {
8911	rtx insn;
8912	unsigned addr = 0;
8913	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
8914	  {
8915	    INSN_ADDRESSES_NEW (insn, addr);
8916	    addr += 4;
8917	  }
8918      }
8919
8920      if (TARGET_DEBUG_STACK)
8921	debug_rtx_list (get_insns (), 100);
8922      final (get_insns (), file, FALSE, FALSE);
8923      end_sequence ();
8924    }
8925
8926  rs6000_pic_labelno++;
8927}
8928
8929/* Emit function epilogue as insns.
8930
8931   At present, dwarf2out_frame_debug_expr doesn't understand
8932   register restores, so we don't bother setting RTX_FRAME_RELATED_P
8933   anywhere in the epilogue.  Most of the insns below would in any case
8934   need special notes to explain where r11 is in relation to the stack.  */
8935
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  /* Offset from FRAME_REG_RTX to the entry-time stack pointer; nonzero
     only for the V.4 keep-frame-until-done path below.  */
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  /* Register the restores are addressed from; stays the stack pointer
     except on the V.4 backchain path, which uses r11.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  /* Mode and byte width of an integer register save slot.  */
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors it is faster to restore CR fields with several
     mtcrf's than with one; likewise when optimizing for size.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	/* Leave sp alone for now; restores use SP_OFFSET and sp is
	   unwound after the loads, below.  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the byte offset; AltiVec loads have no
	       reg+constant addressing mode.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (Pmode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Into r0 for now; moved to LR below.  */
      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Into r12 for now; moved to CR fields below.  */
      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore only the registers the prologue saved; the
       condition here must mirror the save loop in rs6000_emit_prologue.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i),
			  mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL restoring all live call-saved CR fields from
	     r12 at once; each field gets its mask bit (1 << (7-i)).  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Restore each live call-saved CR field individually.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the extra stack adjustment demanded by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Describe the FP loads the routine performs, for the
	     benefit of the rest of the compiler.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
9277
/* Write the function epilogue to FILE.

   When there is no `epilogue' RTL pattern (! HAVE_epilogue), the
   epilogue insns are generated, given addresses and run through
   final () here so they appear in the assembly output.  Afterwards,
   for the AIX ABI (unless -finhibit-size-directive), emit the
   traceback table that debuggers use to unwind the stack.  SIZE is
   unused here.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* The optional traceback-table fields are omitted when optimizing
     for size and on ELF targets.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyway, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
  return;
}
9540
9541/* A C compound statement that outputs the assembler code for a thunk
9542   function, used to implement C++ virtual function calls with
9543   multiple inheritance.  The thunk acts as a wrapper around a virtual
9544   function, adjusting the implicit object parameter before handing
9545   control off to the real function.
9546
9547   First, emit code to add the integer DELTA to the location that
9548   contains the incoming first argument.  Assume that this argument
9549   contains a pointer, and is the one used to pass the `this' pointer
9550   in C++.  This is the incoming argument *before* the function
9551   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
9552   values of all other incoming arguments.
9553
9554   After the addition, emit code to jump to FUNCTION, which is a
9555   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
9556   not touch the return address.  Hence returning from FUNCTION will
9557   return to whoever called the current `thunk'.
9558
9559   The effect must be as if FUNCTION had been called directly with the
9560   adjusted first argument.  This macro is responsible for emitting
9561   all of the code for a thunk function; output_function_prologue()
9562   and output_function_epilogue() are not invoked.
9563
9564   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
9565   been extracted from it.)  It might possibly be useful on some
9566   targets, but probably not.
9567
9568   If you do not define this macro, the target-independent code in the
9569   C++ frontend will generate a less efficient heavyweight thunk that
9570   calls FUNCTION instead of jumping to it.  The generic approach does
9571   not support varargs.  */
9572
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* `this' arrives in r3, unless FUNCTION returns an aggregate via a
     hidden first argument, in which case `this' is in r4.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  /* Counter used to generate unique `Lthunk' TOC labels below.  */
  static int labelno = 0;

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  if (TARGET_ELF)
	    function_section (current_function_decl);
	  else
	    text_section ();
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* r12 now points at the function descriptor; load its entry
	     point (into r0, then CTR), TOC pointer and static chain,
	     then branch through CTR.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9731
9732
9733/* A quick summary of the various types of 'constant-pool tables'
9734   under PowerPC:
9735
9736   Target	Flags		Name		One table per
9737   AIX		(none)		AIX TOC		object file
9738   AIX		-mfull-toc	AIX TOC		object file
9739   AIX		-mminimal-toc	AIX minimal TOC	translation unit
9740   SVR4/EABI	(none)		SVR4 SDATA	object file
9741   SVR4/EABI	-fpic		SVR4 pic	object file
9742   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
9743   SVR4/EABI	-mrelocatable	EABI TOC	function
9744   SVR4/EABI	-maix		AIX TOC		object file
9745   SVR4/EABI	-maix -mminimal-toc
9746				AIX minimal TOC	translation unit
9747
9748   Name			Reg.	Set by	entries	      contains:
9749					made by	 addrs?	fp?	sum?
9750
9751   AIX TOC		2	crt0	as	 Y	option	option
9752   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
9753   SVR4 SDATA		13	crt0	gcc	 N	Y	N
9754   SVR4 pic		30	prolog	ld	 Y	not yet	N
9755   SVR4 PIC		30	prolog	gcc	 Y	option	option
9756   EABI TOC		30	prolog	gcc	 Y	option	option
9757
9758*/
9759
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant will have in the TOC.  */
  enum machine_mode key_mode;
  /* Number of the `LC..' internal label of the first occurrence of
     this (key, key_mode) pair; later duplicates are `.set' equal to
     it (see output_toc).  */
  int labelno;
};

/* Maps (key, key_mode) pairs to the label of their first TOC entry,
   so duplicate entries can be merged.  */
static htab_t toc_hash_table;
9772
/* Hash functions for the hash table.  */

/* Return a hash value for the constant K, combining its rtx code,
   mode and operands.  `e'/`u' sub-expressions are hashed
   recursively.  A LABEL_REF hashes only the referenced label's
   number, so that two distinct references to the same label hash
   identically (matching the equality test in toc_hash_eq).  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
  const char *format = GET_RTX_FORMAT (GET_CODE (k));
  int flen = strlen (format);
  int fidx;

  if (GET_CODE (k) == LABEL_REF)
    return result * 1231 + X0INT (XEXP (k, 0), 3);

  /* Choose the first operand index to hash.  NOTE(review): the
     starting indices presumably skip rtl bookkeeping fields (e.g. a
     CONST_DOUBLE's chain slot, a CODE_LABEL's insn links) that are
     not part of the constant's value -- confirm against rtl.def if
     the rtx formats ever change.  */
  if (GET_CODE (k) == CONST_DOUBLE)
    fidx = 1;
  else if (GET_CODE (k) == CODE_LABEL)
    fidx = 3;
  else
    fidx = 0;

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':			/* string operand */
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':			/* sub-expression: recurse */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':			/* plain integer operand */
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	/* A HOST_WIDE_INT may be wider than `unsigned'; if so, fold
	   it into the hash one `unsigned'-sized chunk at a time.  */
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }
  return result;
}
9831
9832static unsigned
9833toc_hash_function (hash_entry)
9834     const void * hash_entry;
9835{
9836  const struct toc_hash_struct *thc =
9837    (const struct toc_hash_struct *) hash_entry;
9838  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9839}
9840
9841/* Compare H1 and H2 for equivalence.  */
9842
9843static int
9844toc_hash_eq (h1, h2)
9845     const void * h1;
9846     const void * h2;
9847{
9848  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9849  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9850
9851  if (((const struct toc_hash_struct *) h1)->key_mode
9852      != ((const struct toc_hash_struct *) h2)->key_mode)
9853    return 0;
9854
9855  /* Gotcha:  One of these const_doubles will be in memory.
9856     The other may be on the constant-pool chain.
9857     So rtx_equal_p will think they are different...  */
9858  if (r1 == r2)
9859    return 1;
9860  if (GET_CODE (r1) != GET_CODE (r2)
9861      || GET_MODE (r1) != GET_MODE (r2))
9862    return 0;
9863  if (GET_CODE (r1) == CONST_DOUBLE)
9864    {
9865      int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9866      int i;
9867      for (i = 1; i < format_len; i++)
9868	if (XWINT (r1, i) != XWINT (r2, i))
9869	  return 0;
9870
9871      return 1;
9872    }
9873  else if (GET_CODE (r1) == LABEL_REF)
9874    return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9875	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9876  else
9877    return rtx_equal_p (r1, r2);
9878}
9879
9880/* Mark the hash table-entry HASH_ENTRY.  */
9881
9882static int
9883toc_hash_mark_entry (hash_slot, unused)
9884     void ** hash_slot;
9885     void * unused ATTRIBUTE_UNUSED;
9886{
9887  const struct toc_hash_struct * hash_entry =
9888    *(const struct toc_hash_struct **) hash_slot;
9889  rtx r = hash_entry->key;
9890  ggc_set_mark (hash_entry);
9891  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
9892  if (GET_CODE (r) == LABEL_REF)
9893    {
9894      ggc_set_mark (r);
9895      ggc_set_mark (XEXP (r, 0));
9896    }
9897  else
9898    ggc_mark_rtx (r);
9899  return 1;
9900}
9901
9902/* Mark all the elements of the TOC hash-table *HT.  */
9903
9904static void
9905toc_hash_mark_table (vht)
9906     void *vht;
9907{
9908  htab_t *ht = vht;
9909
9910  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9911}
9912
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Return nonzero if NAME names a vtable or vtable-like object:
   "_vt." is the old g++ mangling; "_ZTV", "_ZTT" and "_ZTC" are the
   V3 ABI manglings for vtables, VTTs and construction vtables.
   Note: the macro now uses its NAME argument; the previous version
   referred to a variable literally called `name', so it only worked
   when the caller happened to use that identifier.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
9923
9924void
9925rs6000_output_symbol_ref (file, x)
9926     FILE *file;
9927     rtx x;
9928{
9929  /* Currently C++ toc references to vtables can be emitted before it
9930     is decided whether the vtable is public or private.  If this is
9931     the case, then the linker will eventually complain that there is
9932     a reference to an unknown section.  Thus, for vtables only,
9933     we emit the TOC reference to reference the symbol and not the
9934     section.  */
9935  const char *name = XSTR (x, 0);
9936
9937  if (VTABLE_NAME_P (name))
9938    {
9939      RS6000_OUTPUT_BASENAME (file, name);
9940    }
9941  else
9942    assemble_name (file, name);
9943}
9944
/* Output a TOC entry.  We derive the entry name from what is being
   written.  FILE is the assembly stream, X the constant or address
   being placed in the TOC, LABELNO the number of the `LC..' internal
   label for the entry, and MODE the mode X will have in the TOC.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend the 32-bit CONST_INT into HIGH.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* X is an address: split a CONST into its symbol/label base and
     integer offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a non-zero offset into the entry name: `.N<n>' for a
	 negative offset, `.P<n>' for a positive one.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
10201
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Prefix to emit before the next quoted character, if we are not
     already inside a quoted string; NULL while inside one.  */
  const char *open_string = "\t.byte \"";
  /* Separator/prefix to emit before the next decimal .byte value.  */
  const char *open_decimal = "\t.byte ";
  /* Text needed to terminate whatever we last emitted, or NULL.  */
  const char *close_seq = NULL;
  int quoted_count = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Non-printing character: emit it as a decimal operand.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  close_seq = "\n";
	  quoted_count = 0;
	  continue;
	}

      /* Printing character: emit it inside a quoted string.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* Write two quotes to get one.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  ++quoted_count;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      close_seq = "\"\n";
      ++quoted_count;

      /* The assembler truncates very long strings, so close this one
	 and start fresh before it gets too long.  */
      if (quoted_count >= 512)
	{
	  fputs (close_seq, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  close_seq = NULL;
	  quoted_count = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (close_seq)
    fputs (close_seq, file);
}
10271
10272/* Generate a unique section name for FILENAME for a section type
10273   represented by SECTION_DESC.  Output goes into BUF.
10274
10275   SECTION_DESC can be any string, as long as it is different for each
10276   possible section type.
10277
10278   We name the section in the same manner as xlc.  The name begins with an
10279   underscore followed by the filename (after stripping any leading directory
10280   names) with the last period replaced by the string SECTION_DESC.  If
10281   FILENAME does not contain a period, SECTION_DESC is appended to the end of
10282   the name.  */
10283
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan, *base, *dot;
  char *out;
  int needed;

  /* Locate the start of the base name and the last period anywhere
     in FILENAME (which may precede the last slash).  */
  base = filename;
  dot = 0;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  needed = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (needed);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumeric characters of the base name, substituting
     SECTION_DESC for the last period when it falls inside it.  */
  for (scan = base; *scan; scan++)
    {
      if (scan == dot)
	{
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  /* No period at all: append SECTION_DESC instead.  */
  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
10326
10327/* Emit profile function.  */
10328
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-function counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass mcount the address of this function's "LP<labelno>"
	 counter label.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* Call mcount with the caller's address as its one argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10373
10374/* Write function profiler code.  */
10375
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;	/* Stack offset at which LR is stored below.  */

  /* BUF gets the "LP<labelno>" counter label for this function.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      /* Save LR, load the counter label's address into r0, and call
	 mcount; how the label address is formed depends on PIC mode.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: fetch the label address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with a high/low pair.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
10458
10459/* Adjust the cost of a scheduling dependency.  Return the new cost of
10460   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
10461
10462static int
10463rs6000_adjust_cost (insn, link, dep_insn, cost)
10464     rtx insn;
10465     rtx link;
10466     rtx dep_insn ATTRIBUTE_UNUSED;
10467     int cost;
10468{
10469  if (! recog_memoized (insn))
10470    return 0;
10471
10472  if (REG_NOTE_KIND (link) != 0)
10473    return 0;
10474
10475  if (REG_NOTE_KIND (link) == 0)
10476    {
10477      /* Data dependency; DEP_INSN writes a register that INSN reads
10478	 some cycles later.  */
10479      switch (get_attr_type (insn))
10480	{
10481	case TYPE_JMPREG:
10482          /* Tell the first scheduling pass about the latency between
10483	     a mtctr and bctr (and mtlr and br/blr).  The first
10484	     scheduling pass will not know about this latency since
10485	     the mtctr instruction, which has the latency associated
10486	     to it, will be generated by reload.  */
10487          return TARGET_POWER ? 5 : 4;
10488	case TYPE_BRANCH:
10489	  /* Leave some extra cycles between a compare and its
10490	     dependent branch, to inhibit expensive mispredicts.  */
10491	  if ((rs6000_cpu_attr == CPU_PPC750
10492               || rs6000_cpu_attr == CPU_PPC7400
10493               || rs6000_cpu_attr == CPU_PPC7450)
10494	      && recog_memoized (dep_insn)
10495	      && (INSN_CODE (dep_insn) >= 0)
10496	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10497		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10498		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10499		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10500	    return cost + 2;
10501	default:
10502	  break;
10503	}
10504      /* Fall out to return default cost.  */
10505    }
10506
10507  return cost;
10508}
10509
10510/* A C statement (sans semicolon) to update the integer scheduling
10511   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
10512   INSN earlier, increase the priority to execute INSN later.  Do not
10513   define this macro if you do not need to adjust the scheduling
10514   priorities of insns.  */
10515
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE(review): the experiment below is compiled out; with it
     disabled this hook returns PRIORITY unchanged.  The debugging
     fprintf to stderr suggests it was never finished.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
10553
10554/* Return how many instructions the machine can issue per cycle.  */
10555
10556static int
10557rs6000_issue_rate ()
10558{
10559  switch (rs6000_cpu_attr) {
10560  case CPU_RIOS1:  /* ? */
10561  case CPU_RS64A:
10562  case CPU_PPC601: /* ? */
10563  case CPU_PPC7450:
10564    return 3;
10565  case CPU_PPC603:
10566  case CPU_PPC750:
10567  case CPU_PPC7400:
10568    return 2;
10569  case CPU_RIOS2:
10570  case CPU_PPC604:
10571  case CPU_PPC604E:
10572  case CPU_PPC620:
10573  case CPU_PPC630:
10574    return 4;
10575  default:
10576    return 1;
10577  }
10578}
10579
10580
10581/* Length in units of the trampoline for entering a nested function.  */
10582
10583int
10584rs6000_trampoline_size ()
10585{
10586  int ret = 0;
10587
10588  switch (DEFAULT_ABI)
10589    {
10590    default:
10591      abort ();
10592
10593    case ABI_AIX:
10594      ret = (TARGET_32BIT) ? 12 : 24;
10595      break;
10596
10597    case ABI_DARWIN:
10598    case ABI_V4:
10599    case ABI_AIX_NODESC:
10600      ret = (TARGET_32BIT) ? 40 : 48;
10601      break;
10602    }
10603
10604  return ret;
10605}
10606
10607/* Emit RTL insns to initialize the variable parts of a trampoline.
10608   FNADDR is an RTX for the address of the function's pure code.
10609   CXT is an RTX for the static chain value for the function.  */
10610
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* Bytes per pointer word.  */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the target's entry point and TOC pointer from its
	   descriptor, then append the static chain as the third word.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10659
10660
10661/* Table of valid machine attributes.  */
10662
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" takes no arguments and applies to function types.  */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Sentinel entry; must remain last.  */
  { NULL,       0, 0, false, false, false, NULL }
};
10669
10670/* Handle a "longcall" attribute; arguments as in struct
10671   attribute_spec.handler.  */
10672
10673static tree
10674rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10675     tree *node;
10676     tree name;
10677     tree args ATTRIBUTE_UNUSED;
10678     int flags ATTRIBUTE_UNUSED;
10679     bool *no_add_attrs;
10680{
10681  if (TREE_CODE (*node) != FUNCTION_TYPE
10682      && TREE_CODE (*node) != FIELD_DECL
10683      && TREE_CODE (*node) != TYPE_DECL)
10684    {
10685      warning ("`%s' attribute only applies to functions",
10686	       IDENTIFIER_POINTER (name));
10687      *no_add_attrs = true;
10688    }
10689
10690  return NULL_TREE;
10691}
10692
10693/* Return a reference suitable for calling a function with the
10694   longcall attribute.  */
10695
10696struct rtx_def *
10697rs6000_longcall_ref (call_ref)
10698     rtx call_ref;
10699{
10700  const char *call_name;
10701  tree node;
10702
10703  if (GET_CODE (call_ref) != SYMBOL_REF)
10704    return call_ref;
10705
10706  /* System V adds '.' to the internal name, so skip them.  */
10707  call_name = XSTR (call_ref, 0);
10708  if (*call_name == '.')
10709    {
10710      while (*call_name == '.')
10711	call_name++;
10712
10713      node = get_identifier (call_name);
10714      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10715    }
10716
10717  return force_reg (Pmode, call_ref);
10718}
10719
10720
10721/* A C statement or statements to switch to the appropriate section
10722   for output of RTX in mode MODE.  You can assume that RTX is some
10723   kind of constant in RTL.  The argument MODE is redundant except in
10724   the case of a `const_int' rtx.  Select the section by calling
10725   `text_section' or one of the alternatives for other sections.
10726
10727   Do not define this macro if you put all constants in the read-only
10728   data section.  */
10729
10730#ifdef USING_ELFOS_H
10731
10732void
10733rs6000_select_rtx_section (mode, x)
10734     enum machine_mode mode;
10735     rtx x;
10736{
10737  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10738    toc_section ();
10739  else if (flag_pic
10740	   && (GET_CODE (x) == SYMBOL_REF
10741	       || GET_CODE (x) == LABEL_REF
10742	       || GET_CODE (x) == CONST))
10743    data_section ();
10744  else
10745    const_section ();
10746}
10747
10748/* A C statement or statements to switch to the appropriate
10749   section for output of DECL.  DECL is either a `VAR_DECL' node
10750   or a constant of some sort.  RELOC indicates whether forming
10751   the initial value of DECL requires link-time relocations.  */
10752
10753void
10754rs6000_select_section (decl, reloc)
10755     tree decl;
10756     int reloc;
10757{
10758  int size = int_size_in_bytes (TREE_TYPE (decl));
10759  int needs_sdata;
10760  int readonly;
10761  static void (* const sec_funcs[4]) PARAMS ((void)) = {
10762    &const_section,
10763    &sdata2_section,
10764    &data_section,
10765    &sdata_section
10766  };
10767
10768  needs_sdata = (size > 0
10769		 && size <= g_switch_value
10770		 && rs6000_sdata != SDATA_NONE
10771		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10772
10773  if (TREE_CODE (decl) == STRING_CST)
10774    readonly = ! flag_writable_strings;
10775  else if (TREE_CODE (decl) == VAR_DECL)
10776    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10777		&& TREE_READONLY (decl)
10778		&& ! TREE_SIDE_EFFECTS (decl)
10779		&& DECL_INITIAL (decl)
10780		&& DECL_INITIAL (decl) != error_mark_node
10781		&& TREE_CONSTANT (DECL_INITIAL (decl)));
10782  else if (TREE_CODE (decl) == CONSTRUCTOR)
10783    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10784		&& ! TREE_SIDE_EFFECTS (decl)
10785		&& TREE_CONSTANT (decl));
10786  else
10787    readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);
10788
10789  if (needs_sdata && rs6000_sdata != SDATA_EABI)
10790    readonly = 0;
10791
10792  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10793}
10794
10795/* A C statement to build up a unique section name, expressed as a
10796   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10797   RELOC indicates whether the initial value of EXP requires
10798   link-time relocations.  If you do not define this macro, GCC will use
10799   the symbol name prefixed by `.' as the section name.  Note - this
10800   macro can now be called for uninitialized data items as well as
10801   initialised data and functions.  */
10802
void
rs6000_unique_section (decl, reloc)
     tree decl;
     int reloc;
{
  int len;
  int sec;		/* Row index into PREFIXES below.  */
  const char *name;
  char *string;
  const char *prefix;

  /* Rows: 0 rodata, 1 sdata2, 2 data, 3 sdata, 4 bss, 5 sbss, 6 text.
     Column 1 is used for one-only (linkonce) decls.  */
  static const char *const prefixes[7][2] =
  {
    { ".rodata.", ".gnu.linkonce.r." },
    { ".sdata2.", ".gnu.linkonce.s2." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".sdata.",  ".gnu.linkonce.s." },
    { ".bss.",    ".gnu.linkonce.b." },
    { ".sbss.",   ".gnu.linkonce.sb." },
    { ".text.",   ".gnu.linkonce.t." }
  };

  if (TREE_CODE (decl) == FUNCTION_DECL)
    sec = 6;
  else
    {
      int readonly;
      int needs_sdata;
      int size;

      /* NOTE(review): unlike rs6000_select_section, this does not
	 check DECL_INITIAL for null/error_mark_node before calling
	 TREE_CONSTANT on it -- confirm callers guarantee an
	 initializer on this path.  */
      if (TREE_CODE (decl) == STRING_CST)
	readonly = ! flag_writable_strings;
      else if (TREE_CODE (decl) == VAR_DECL)
	readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
		    && TREE_READONLY (decl)
		    && ! TREE_SIDE_EFFECTS (decl)
		    && TREE_CONSTANT (DECL_INITIAL (decl)));
      else
	readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);

      size = int_size_in_bytes (TREE_TYPE (decl));
      needs_sdata = (size > 0
		     && size <= g_switch_value
		     && rs6000_sdata != SDATA_NONE
		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

      /* Uninitialized data goes to bss, writable to data, else rodata.  */
      if (DECL_INITIAL (decl) == 0
	  || DECL_INITIAL (decl) == error_mark_node)
	sec = 4;
      else if (! readonly)
	sec = 2;
      else
	sec = 0;

      if (needs_sdata)
	{
	  /* .sdata2 is only for EABI.  */
	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
	    sec = 2;
	  sec += 1;
	}
    }

  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
  len    = strlen (name) + strlen (prefix);
  string = alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
10875
10876
10877/* If we are referencing a function that is static or is known to be
10878   in this file, make the SYMBOL_REF special.  We can use this to indicate
10879   that we can branch to this function without emitting a no-op after the
10880   call.  For real AIX calling sequences, we also replace the
10881   function name with the real name (1 or 2 leading .'s), rather than
10882   the function descriptor name.  This saves a lot of overriding code
10883   to read the prefixes.  */
10884
void
rs6000_encode_section_info (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Mark functions defined in this translation unit (or local to
	 it) so calls to them can skip the post-call no-op; weak
	 definitions may be overridden, so they are excluded.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
          && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* Prefix the symbol with '.' to name the code entry rather
	     than the function descriptor.  NOTE(review): LEN1 is
	     always 1 here since this branch requires ABI_AIX, so the
	     second '.' stored below is always overwritten -- the ?:
	     looks like a leftover from a two-dot variant.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables that are small or explicitly placed in one of the
	 small-data sections get an '@' prefix on their symbol.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
10953
10954#endif /* USING_ELFOS_H */
10955
10956
10957/* Return a REG that occurs in ADDR with coefficient 1.
10958   ADDR can be effectively incremented by incrementing REG.
10959
10960   r0 is special and we must not select it as an address
10961   register by this routine since our caller will try to
10962   increment the returned register via an "la" instruction.  */
10963
10964struct rtx_def *
10965find_addr_reg (addr)
10966     rtx addr;
10967{
10968  while (GET_CODE (addr) == PLUS)
10969    {
10970      if (GET_CODE (XEXP (addr, 0)) == REG
10971	  && REGNO (XEXP (addr, 0)) != 0)
10972	addr = XEXP (addr, 0);
10973      else if (GET_CODE (XEXP (addr, 1)) == REG
10974	       && REGNO (XEXP (addr, 1)) != 0)
10975	addr = XEXP (addr, 1);
10976      else if (CONSTANT_P (XEXP (addr, 0)))
10977	addr = XEXP (addr, 1);
10978      else if (CONSTANT_P (XEXP (addr, 1)))
10979	addr = XEXP (addr, 0);
10980      else
10981	abort ();
10982    }
10983  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10984    return addr;
10985  abort ();
10986}
10987
/* Report a fatal error for OP, an insn with an invalid address.  */

void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
10994
10995/* Called to register all of our global variables with the garbage
10996   collector.  */
10997
static void
rs6000_add_gc_roots ()
{
  /* Register the pending compare operands as GC roots.  */
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* Create the TOC hash table and register it with a custom mark
     routine so its contents survive collection.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
11012
11013#if TARGET_MACHO
11014
11015#if 0
11016/* Returns 1 if OP is either a symbol reference or a sum of a symbol
11017   reference and a constant.  */
11018
/* NOTE(review): this code is compiled out (#if 0 above).  If it is
   ever enabled: the CONST case never verifies that OP is a PLUS
   before taking XEXP (op, 0)/(op, 1), and the return expression
   mixes || and && without parentheses -- review first.  */
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
11038#endif
11039
11040#ifdef RS6000_LONG_BRANCH
11041
/* Head of the list of compiler-generated long-branch stubs; each
   element is a TREE_LIST node, accessed via the STUB_* macros.  */
static tree stub_list = 0;

/* ADD_COMPILER_STUB adds the compiler generated stub for handling
   procedure calls to the linked list.  */

void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* PURPOSE holds the function name, VALUE the stub label, and
     TYPE the source line number.  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
11058
/* Accessors for the fields of a stub list node built by
   add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))

/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  /* NOTE(review): LABEL and TMP_STUB appear to be unused.  */
  char *label;
  tree tmp_stub, stub;

  /* Stubs are only emitted for non-PIC code.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means the name is already assembler-ready;
	   otherwise prepend a '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 and branch through CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list for the next function.  */
  stub_list = 0;
}
11111
11112/* NO_PREVIOUS_DEF checks in the link list whether the function name is
11113   already there or not.  */
11114
11115int
11116no_previous_def (function_name)
11117     tree function_name;
11118{
11119  tree stub;
11120  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11121    if (function_name == STUB_FUNCTION_NAME (stub))
11122      return 0;
11123  return 1;
11124}
11125
11126/* GET_PREV_LABEL gets the label name from the previous definition of
11127   the function.  */
11128
11129tree
11130get_prev_label (function_name)
11131     tree function_name;
11132{
11133  tree stub;
11134  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11135    if (function_name == STUB_FUNCTION_NAME (stub))
11136      return STUB_LABEL_NAME (stub);
11137  return 0;
11138}
11139
11140/* INSN is either a function call or a millicode call.  It may have an
11141   unconditional jump in its delay slot.
11142
11143   CALL_DEST is the routine we are calling.  */
11144
11145char *
11146output_call (insn, call_dest, operand_number)
11147     rtx insn;
11148     rtx call_dest;
11149     int operand_number;
11150{
11151  static char buf[256];
11152  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11153    {
11154      tree labelname;
11155      tree funname = get_identifier (XSTR (call_dest, 0));
11156
11157      if (no_previous_def (funname))
11158	{
11159	  int line_number;
11160	  rtx label_rtx = gen_label_rtx ();
11161	  char *label_buf, temp_buf[256];
11162	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11163				       CODE_LABEL_NUMBER (label_rtx));
11164	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11165	  labelname = get_identifier (label_buf);
11166	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11167	  if (insn)
11168	    line_number = NOTE_LINE_NUMBER (insn);
11169	  add_compiler_stub (labelname, funname, line_number);
11170	}
11171      else
11172	labelname = get_prev_label (funname);
11173
11174      sprintf (buf, "jbsr %%z%d,%.246s",
11175	       operand_number, IDENTIFIER_POINTER (labelname));
11176      return buf;
11177    }
11178  else
11179    {
11180      sprintf (buf, "bl %%z%d", operand_number);
11181      return buf;
11182    }
11183}
11184
11185#endif /* RS6000_LONG_BRANCH */
11186
/* Build in BUF the local label "L<N>$<symbol>", placing quotes around
   the result when SYMBOL is quoted or needs quoting.  LENGTH is not
   used by this macro; presumably kept for symmetry with the other
   GEN_*_FOR_SYMBOL macros -- confirm before removing.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
11204
11205
11206/* Generate PIC and indirect symbol stubs.  */
11207
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* Sequence number for local labels.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* Build the symbol, lazy-pointer, and local label names.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: compute the lazy pointer's address pc-relatively
	 via bcl/mflr, load the target into r12, leave r11 pointing
	 at the lazy pointer, and branch through CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself, initially resolving to
     dyld_stub_binding_helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
11264
11265/* Legitimize PIC addresses.  If the address is already
11266   position-independent, we return ORIG.  Newly generated
11267   position-independent addresses go into a reg.  This is REG if non
11268   zero, otherwise we allocate register(s) as necessary.  */
11269
/* True iff the CONST_INT X fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already relative to the PIC base register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize both halves of a (const (plus ...)) recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot make new pseudos; fall back
		 to a constant-pool reference.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
11319
11320/* This is just a placeholder to make linking work without having to
11321   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
11322   ever needed for Darwin (not too likely!) this would have to get a
11323   real definition.  */
11324
void
toc_section ()
{
  /* Intentionally empty: Darwin does not use a TOC, but generic code
     references this symbol, so a stub definition is needed for the
     link to succeed.  */
}
11329
11330#endif /* TARGET_MACHO */
11331
11332#if TARGET_ELF
11333static unsigned int
11334rs6000_elf_section_type_flags (decl, name, reloc)
11335     tree decl;
11336     const char *name;
11337     int reloc;
11338{
11339  unsigned int flags = default_section_type_flags (decl, name, reloc);
11340
11341  if (TARGET_RELOCATABLE)
11342    flags |= SECTION_WRITE;
11343
11344  return flags;
11345}
11346
11347/* Record an element in the table of global constructors.  SYMBOL is
11348   a SYMBOL_REF of the function to be called; PRIORITY is a number
11349   between 0 and MAX_INIT_PRIORITY.
11350
11351   This differs from default_named_section_asm_out_constructor in
11352   that we have special handling for -mrelocatable.  */
11353
11354static void
11355rs6000_elf_asm_out_constructor (symbol, priority)
11356     rtx symbol;
11357     int priority;
11358{
11359  const char *section = ".ctors";
11360  char buf[16];
11361
11362  if (priority != DEFAULT_INIT_PRIORITY)
11363    {
11364      sprintf (buf, ".ctors.%.5u",
11365               /* Invert the numbering so the linker puts us in the proper
11366                  order; constructors are run from right to left, and the
11367                  linker sorts in increasing order.  */
11368               MAX_INIT_PRIORITY - priority);
11369      section = buf;
11370    }
11371
11372  named_section_flags (section, SECTION_WRITE);
11373  assemble_align (POINTER_SIZE);
11374
11375  if (TARGET_RELOCATABLE)
11376    {
11377      fputs ("\t.long (", asm_out_file);
11378      output_addr_const (asm_out_file, symbol);
11379      fputs (")@fixup\n", asm_out_file);
11380    }
11381  else
11382    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11383}
11384
11385static void
11386rs6000_elf_asm_out_destructor (symbol, priority)
11387     rtx symbol;
11388     int priority;
11389{
11390  const char *section = ".dtors";
11391  char buf[16];
11392
11393  if (priority != DEFAULT_INIT_PRIORITY)
11394    {
11395      sprintf (buf, ".dtors.%.5u",
11396               /* Invert the numbering so the linker puts us in the proper
11397                  order; constructors are run from right to left, and the
11398                  linker sorts in increasing order.  */
11399               MAX_INIT_PRIORITY - priority);
11400      section = buf;
11401    }
11402
11403  named_section_flags (section, SECTION_WRITE);
11404  assemble_align (POINTER_SIZE);
11405
11406  if (TARGET_RELOCATABLE)
11407    {
11408      fputs ("\t.long (", asm_out_file);
11409      output_addr_const (asm_out_file, symbol);
11410      fputs (")@fixup\n", asm_out_file);
11411    }
11412  else
11413    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11414}
11415#endif
11416
11417#ifdef OBJECT_FORMAT_COFF
11418static void
11419xcoff_asm_named_section (name, flags)
11420     const char *name;
11421     unsigned int flags ATTRIBUTE_UNUSED;
11422{
11423  fprintf (asm_out_file, "\t.csect %s\n", name);
11424}
11425#endif
11426