rs6000.c revision 107590
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING.  If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE: classic function-like min/max macros: each argument may be
   evaluated twice, so do not pass expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
59/* Target cpu type */
60
enum processor_type rs6000_cpu;
/* Sources of a cpu selection, scanned in order by
   rs6000_override_options; later entries (e.g. -mtune=) override
   earlier ones (the configure-time --with-cpu default).  The tune/arch
   flags say whether the switch sets rs6000_cpu and/or target_flags.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
69
70/* Size of long double */
71const char *rs6000_long_double_size_string;
72int rs6000_long_double_type_size;
73
74/* Whether -mabi=altivec has appeared */
75int rs6000_altivec_abi;
76
77/* Set to non-zero once AIX common-mode calls have been defined.  */
78static int common_mode_defined;
79
80/* Save information from a "cmpxx" operation until the branch or scc is
81   emitted.  */
82rtx rs6000_compare_op0, rs6000_compare_op1;
83int rs6000_compare_fp_p;
84
85/* Label number of label created for -mrelocatable, to call to so we can
86   get the address of the GOT section */
87int rs6000_pic_labelno;
88
89#ifdef USING_ELFOS_H
90/* Which abi to adhere to */
91const char *rs6000_abi_name = RS6000_ABI_NAME;
92
93/* Semantics of the small data area */
94enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
95
96/* Which small data model to use */
97const char *rs6000_sdata_name = (char *)0;
98
99/* Counter for labels which are to be placed in .fixup.  */
100int fixuplabelno = 0;
101#endif
102
103/* ABI enumeration available for subtarget to use.  */
104enum rs6000_abi rs6000_current_abi;
105
106/* ABI string from -mabi= option.  */
107const char *rs6000_abi_string;
108
109/* Debug flags */
110const char *rs6000_debug_name;
111int rs6000_debug_stack;		/* debug stack applications */
112int rs6000_debug_arg;		/* debug argument handling */
113
114/* Flag to say the TOC is initialized */
115int toc_initialized;
116char toc_label_name[10];
117
118/* Alias set for saves and restores from the rs6000 stack.  */
119static int rs6000_sr_alias_set;
120
121static void rs6000_add_gc_roots PARAMS ((void));
122static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123static void validate_condition_mode
124  PARAMS ((enum rtx_code, enum machine_mode));
125static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
126static void rs6000_maybe_dead PARAMS ((rtx));
127static void rs6000_emit_stack_tie PARAMS ((void));
128static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
129static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
130static unsigned rs6000_hash_constant PARAMS ((rtx));
131static unsigned toc_hash_function PARAMS ((const void *));
132static int toc_hash_eq PARAMS ((const void *, const void *));
133static int toc_hash_mark_entry PARAMS ((void **, void *));
134static void toc_hash_mark_table PARAMS ((void *));
135static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
136static void rs6000_free_machine_status PARAMS ((struct function *));
137static void rs6000_init_machine_status PARAMS ((struct function *));
138static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
139static int rs6000_ra_ever_killed PARAMS ((void));
140static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
141const struct attribute_spec rs6000_attribute_table[];
142static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
143static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
144static rtx rs6000_emit_set_long_const PARAMS ((rtx,
145  HOST_WIDE_INT, HOST_WIDE_INT));
146#if TARGET_ELF
147static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
148							   int));
149static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
150static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
151#endif
152#ifdef OBJECT_FORMAT_COFF
153static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
154#endif
155static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
156static int rs6000_adjust_priority PARAMS ((rtx, int));
157static int rs6000_issue_rate PARAMS ((void));
158
159static void rs6000_init_builtins PARAMS ((void));
160static void altivec_init_builtins PARAMS ((void));
161static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
162static rtx altivec_expand_builtin PARAMS ((tree, rtx));
163static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
164static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
165static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
166static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
167static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
168static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
169static void rs6000_parse_abi_options PARAMS ((void));
170static int first_altivec_reg_to_save PARAMS ((void));
171static unsigned int compute_vrsave_mask PARAMS ((void));
172static void is_altivec_return_reg PARAMS ((rtx, void *));
173int vrsave_operation PARAMS ((rtx, enum machine_mode));
174static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
175static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
176static int easy_vector_constant PARAMS ((rtx));
177
178/* Default register names.  */
char rs6000_reg_names[][8] =
{
      /* General-purpose registers 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* Floating-point registers 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
      /* Condition registers CR0-CR7, then the XER.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
199
#ifdef TARGET_REGNAMES
/* Alternate register names carrying explicit %-prefixes; copied over
   rs6000_reg_names by rs6000_override_options when TARGET_REGNAMES is
   set.  Must stay index-parallel with rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9",  "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
   "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
   "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
   "vrsave"
};
#endif
222
223#ifndef MASK_STRICT_ALIGN
224#define MASK_STRICT_ALIGN 0
225#endif
226
227/* Initialize the GCC target structure.  */
228#undef TARGET_ATTRIBUTE_TABLE
229#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
230
231#undef TARGET_ASM_ALIGNED_DI_OP
232#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
233
234/* Default unaligned ops are only provided for ELF.  Find the ops needed
235   for non-ELF systems.  */
236#ifndef OBJECT_FORMAT_ELF
237#ifdef OBJECT_FORMAT_COFF
238/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
239   64-bit targets.  */
240#undef TARGET_ASM_UNALIGNED_HI_OP
241#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
242#undef TARGET_ASM_UNALIGNED_SI_OP
243#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
244#undef TARGET_ASM_UNALIGNED_DI_OP
245#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
246#else
247/* For Darwin.  */
248#undef TARGET_ASM_UNALIGNED_HI_OP
249#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
250#undef TARGET_ASM_UNALIGNED_SI_OP
251#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
252#endif
253#endif
254
255/* This hook deals with fixups for relocatable code and DI-mode objects
256   in 64-bit code.  */
257#undef TARGET_ASM_INTEGER
258#define TARGET_ASM_INTEGER rs6000_assemble_integer
259
260#undef TARGET_ASM_FUNCTION_PROLOGUE
261#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
262#undef TARGET_ASM_FUNCTION_EPILOGUE
263#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
264
265#if TARGET_ELF
266#undef TARGET_SECTION_TYPE_FLAGS
267#define TARGET_SECTION_TYPE_FLAGS  rs6000_elf_section_type_flags
268#endif
269
270#undef TARGET_SCHED_ISSUE_RATE
271#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
272#undef TARGET_SCHED_ADJUST_COST
273#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
274#undef TARGET_SCHED_ADJUST_PRIORITY
275#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
276
277#undef TARGET_INIT_BUILTINS
278#define TARGET_INIT_BUILTINS rs6000_init_builtins
279
280#undef TARGET_EXPAND_BUILTIN
281#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
282
283/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
284#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
285
286struct gcc_target targetm = TARGET_INITIALIZER;
287
288/* Override command line options.  Mostly we process the processor
289   type and sometimes adjust other TARGET_ options.  */
290
291void
292rs6000_override_options (default_cpu)
293     const char *default_cpu;
294{
295  size_t i, j;
296  struct rs6000_cpu_select *ptr;
297
298  /* Simplify the entries below by making a mask for any POWER
299     variant and any PowerPC variant.  */
300
301#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
302#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
303		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
304#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
305
306  static struct ptt
307    {
308      const char *const name;		/* Canonical processor name.  */
309      const enum processor_type processor; /* Processor type enum value.  */
310      const int target_enable;	/* Target flags to enable.  */
311      const int target_disable;	/* Target flags to disable.  */
312    } const processor_target_table[]
313      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
314	    POWER_MASKS | POWERPC_MASKS},
315	 {"power", PROCESSOR_POWER,
316	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
317	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
318	 {"power2", PROCESSOR_POWER,
319	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
320	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
321	 {"power3", PROCESSOR_PPC630,
322	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
323	    POWER_MASKS | MASK_PPC_GPOPT},
324	 {"powerpc", PROCESSOR_POWERPC,
325	    MASK_POWERPC | MASK_NEW_MNEMONICS,
326	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
327	 {"powerpc64", PROCESSOR_POWERPC64,
328	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
329	    POWER_MASKS | POWERPC_OPT_MASKS},
330	 {"rios", PROCESSOR_RIOS1,
331	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
332	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
333	 {"rios1", PROCESSOR_RIOS1,
334	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
335	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
336	 {"rsc", PROCESSOR_PPC601,
337	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
338	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
339	 {"rsc1", PROCESSOR_PPC601,
340	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
341	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
342	 {"rios2", PROCESSOR_RIOS2,
343	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
344	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
345	 {"rs64a", PROCESSOR_RS64A,
346	    MASK_POWERPC | MASK_NEW_MNEMONICS,
347	    POWER_MASKS | POWERPC_OPT_MASKS},
348	 {"401", PROCESSOR_PPC403,
349	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
350	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
351	 {"403", PROCESSOR_PPC403,
352	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
353	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
354	 {"405", PROCESSOR_PPC405,
355	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
356	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
357	 {"505", PROCESSOR_MPCCORE,
358	    MASK_POWERPC | MASK_NEW_MNEMONICS,
359	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
360	 {"601", PROCESSOR_PPC601,
361	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
362	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
363	 {"602", PROCESSOR_PPC603,
364	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
365	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
366	 {"603", PROCESSOR_PPC603,
367	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
368	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
369	 {"603e", PROCESSOR_PPC603,
370	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
371	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
372	 {"ec603e", PROCESSOR_PPC603,
373	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
374	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
375	 {"604", PROCESSOR_PPC604,
376	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
377	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
378	 {"604e", PROCESSOR_PPC604e,
379	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
380	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
381	 {"620", PROCESSOR_PPC620,
382	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
383	    POWER_MASKS | MASK_PPC_GPOPT},
384	 {"630", PROCESSOR_PPC630,
385	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
386	    POWER_MASKS | MASK_PPC_GPOPT},
387	 {"740", PROCESSOR_PPC750,
388 	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
389 	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
390	 {"750", PROCESSOR_PPC750,
391 	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
392 	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
393	 {"7400", PROCESSOR_PPC7400,
394            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
395            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
396	 {"7450", PROCESSOR_PPC7450,
397            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
398            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
399	 {"801", PROCESSOR_MPCCORE,
400	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
401	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
402	 {"821", PROCESSOR_MPCCORE,
403	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
404	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
405	 {"823", PROCESSOR_MPCCORE,
406	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
407	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
408	 {"860", PROCESSOR_MPCCORE,
409	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
410	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
411
412  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
413
414  /* Save current -mmultiple/-mno-multiple status.  */
415  int multiple = TARGET_MULTIPLE;
416  /* Save current -mstring/-mno-string status.  */
417  int string = TARGET_STRING;
418
419  /* Identify the processor type.  */
420  rs6000_select[0].string = default_cpu;
421  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
422
423  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
424    {
425      ptr = &rs6000_select[i];
426      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
427	{
428	  for (j = 0; j < ptt_size; j++)
429	    if (! strcmp (ptr->string, processor_target_table[j].name))
430	      {
431		if (ptr->set_tune_p)
432		  rs6000_cpu = processor_target_table[j].processor;
433
434		if (ptr->set_arch_p)
435		  {
436		    target_flags |= processor_target_table[j].target_enable;
437		    target_flags &= ~processor_target_table[j].target_disable;
438		  }
439		break;
440	      }
441
442	  if (j == ptt_size)
443	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
444	}
445    }
446
447  /* If we are optimizing big endian systems for space, use the store
448     multiple instructions.  */
449  if (BYTES_BIG_ENDIAN && optimize_size)
450    target_flags |= MASK_MULTIPLE;
451
452  /* If -mmultiple or -mno-multiple was explicitly used, don't
453     override with the processor default */
454  if (TARGET_MULTIPLE_SET)
455    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
456
457  /* If -mstring or -mno-string was explicitly used, don't override
458     with the processor default.  */
459  if (TARGET_STRING_SET)
460    target_flags = (target_flags & ~MASK_STRING) | string;
461
462  /* Don't allow -mmultiple or -mstring on little endian systems
463     unless the cpu is a 750, because the hardware doesn't support the
464     instructions used in little endian mode, and causes an alignment
465     trap.  The 750 does not cause an alignment trap (except when the
466     target is unaligned).  */
467
468  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
469    {
470      if (TARGET_MULTIPLE)
471	{
472	  target_flags &= ~MASK_MULTIPLE;
473	  if (TARGET_MULTIPLE_SET)
474	    warning ("-mmultiple is not supported on little endian systems");
475	}
476
477      if (TARGET_STRING)
478	{
479	  target_flags &= ~MASK_STRING;
480	  if (TARGET_STRING_SET)
481	    warning ("-mstring is not supported on little endian systems");
482	}
483    }
484
485  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
486    {
487      flag_pic = 0;
488
489      if (extra_warnings)
490	warning ("-f%s ignored (all code is position independent)",
491		 (flag_pic > 1) ? "PIC" : "pic");
492    }
493
494#ifdef XCOFF_DEBUGGING_INFO
495  if (flag_function_sections && (write_symbols != NO_DEBUG)
496      && DEFAULT_ABI == ABI_AIX)
497    {
498      warning ("-ffunction-sections disabled on AIX when debugging");
499      flag_function_sections = 0;
500    }
501
502  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
503    {
504      warning ("-fdata-sections not supported on AIX");
505      flag_data_sections = 0;
506    }
507#endif
508
509  /* Set debug flags */
510  if (rs6000_debug_name)
511    {
512      if (! strcmp (rs6000_debug_name, "all"))
513	rs6000_debug_stack = rs6000_debug_arg = 1;
514      else if (! strcmp (rs6000_debug_name, "stack"))
515	rs6000_debug_stack = 1;
516      else if (! strcmp (rs6000_debug_name, "arg"))
517	rs6000_debug_arg = 1;
518      else
519	error ("unknown -mdebug-%s switch", rs6000_debug_name);
520    }
521
522  /* Set size of long double */
523  rs6000_long_double_type_size = 64;
524  if (rs6000_long_double_size_string)
525    {
526      char *tail;
527      int size = strtol (rs6000_long_double_size_string, &tail, 10);
528      if (*tail != '\0' || (size != 64 && size != 128))
529	error ("Unknown switch -mlong-double-%s",
530	       rs6000_long_double_size_string);
531      else
532	rs6000_long_double_type_size = size;
533    }
534
535  /* Handle -mabi= options.  */
536  rs6000_parse_abi_options ();
537
538#ifdef TARGET_REGNAMES
539  /* If the user desires alternate register names, copy in the
540     alternate names now.  */
541  if (TARGET_REGNAMES)
542    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
543#endif
544
545#ifdef SUBTARGET_OVERRIDE_OPTIONS
546  SUBTARGET_OVERRIDE_OPTIONS;
547#endif
548#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
549  SUBSUBTARGET_OVERRIDE_OPTIONS;
550#endif
551
552  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
553     If -maix-struct-return or -msvr4-struct-return was explicitly
554     used, don't override with the ABI default.  */
555  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
556    {
557      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
558	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
559      else
560	target_flags |= MASK_AIX_STRUCT_RET;
561    }
562
563  /* Register global variables with the garbage collector.  */
564  rs6000_add_gc_roots ();
565
566  /* Allocate an alias set for register saves & restores from stack.  */
567  rs6000_sr_alias_set = new_alias_set ();
568
569  if (TARGET_TOC)
570    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
571
572  /* We can only guarantee the availability of DI pseudo-ops when
573     assembling for 64-bit targets.  */
574  if (!TARGET_64BIT)
575    {
576      targetm.asm_out.aligned_op.di = NULL;
577      targetm.asm_out.unaligned_op.di = NULL;
578    }
579
580  /* Arrange to save and restore machine status around nested functions.  */
581  init_machine_status = rs6000_init_machine_status;
582  free_machine_status = rs6000_free_machine_status;
583}
584
585/* Handle -mabi= options.  */
586static void
587rs6000_parse_abi_options ()
588{
589  if (rs6000_abi_string == 0)
590    return;
591  else if (! strcmp (rs6000_abi_string, "altivec"))
592    rs6000_altivec_abi = 1;
593  else if (! strcmp (rs6000_abi_string, "no-altivec"))
594    rs6000_altivec_abi = 0;
595  else
596    error ("unknown ABI specified: '%s'", rs6000_abi_string);
597}
598
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
  /* Intentionally empty: the rs6000 port makes no per-level or
     per-size adjustments to the optimization defaults.  */
}
605
606/* Do anything needed at the start of the asm file.  */
607
608void
609rs6000_file_start (file, default_cpu)
610     FILE *file;
611     const char *default_cpu;
612{
613  size_t i;
614  char buffer[80];
615  const char *start = buffer;
616  struct rs6000_cpu_select *ptr;
617
618  if (flag_verbose_asm)
619    {
620      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
621      rs6000_select[0].string = default_cpu;
622
623      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
624	{
625	  ptr = &rs6000_select[i];
626	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
627	    {
628	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
629	      start = "";
630	    }
631	}
632
633#ifdef USING_ELFOS_H
634      switch (rs6000_sdata)
635	{
636	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
637	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
638	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
639	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
640	}
641
642      if (rs6000_sdata && g_switch_value)
643	{
644	  fprintf (file, "%s -G %d", start, g_switch_value);
645	  start = "";
646	}
647#endif
648
649      if (*start == '\0')
650	putc ('\n', file);
651    }
652}
653
654
655/* Create a CONST_DOUBLE from a string.  */
656
657struct rtx_def *
658rs6000_float_const (string, mode)
659     const char *string;
660     enum machine_mode mode;
661{
662  REAL_VALUE_TYPE value;
663  value = REAL_VALUE_ATOF (string, mode);
664  return immed_real_const_1 (value, mode);
665}
666
667/* Return non-zero if this function is known to have a null epilogue.  */
668
669int
670direct_return ()
671{
672  if (reload_completed)
673    {
674      rs6000_stack_t *info = rs6000_stack_info ();
675
676      if (info->first_gp_reg_save == 32
677	  && info->first_fp_reg_save == 64
678	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
679	  && ! info->lr_save_p
680	  && ! info->cr_save_p
681	  && info->vrsave_mask == 0
682	  && ! info->push_p)
683	return 1;
684    }
685
686  return 0;
687}
688
689/* Returns 1 always.  */
690
691int
692any_operand (op, mode)
693     rtx op ATTRIBUTE_UNUSED;
694     enum machine_mode mode ATTRIBUTE_UNUSED;
695{
696  return 1;
697}
698
699/* Returns 1 if op is the count register.  */
700int
701count_register_operand (op, mode)
702     rtx op;
703     enum machine_mode mode ATTRIBUTE_UNUSED;
704{
705  if (GET_CODE (op) != REG)
706    return 0;
707
708  if (REGNO (op) == COUNT_REGISTER_REGNUM)
709    return 1;
710
711  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
712    return 1;
713
714  return 0;
715}
716
717/* Returns 1 if op is an altivec register.  */
718int
719altivec_register_operand (op, mode)
720     rtx op;
721     enum machine_mode mode ATTRIBUTE_UNUSED;
722{
723
724  return (register_operand (op, mode)
725	  && (GET_CODE (op) != REG
726	      || REGNO (op) > FIRST_PSEUDO_REGISTER
727	      || ALTIVEC_REGNO_P (REGNO (op))));
728}
729
730int
731xer_operand (op, mode)
732     rtx op;
733     enum machine_mode mode ATTRIBUTE_UNUSED;
734{
735  if (GET_CODE (op) != REG)
736    return 0;
737
738  if (XER_REGNO_P (REGNO (op)))
739    return 1;
740
741  return 0;
742}
743
744/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
745   by such constants completes more quickly.  */
746
747int
748s8bit_cint_operand (op, mode)
749     rtx op;
750     enum machine_mode mode ATTRIBUTE_UNUSED;
751{
752  return ( GET_CODE (op) == CONST_INT
753	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
754}
755
756/* Return 1 if OP is a constant that can fit in a D field.  */
757
758int
759short_cint_operand (op, mode)
760     rtx op;
761     enum machine_mode mode ATTRIBUTE_UNUSED;
762{
763  return (GET_CODE (op) == CONST_INT
764	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
765}
766
767/* Similar for an unsigned D field.  */
768
769int
770u_short_cint_operand (op, mode)
771     rtx op;
772     enum machine_mode mode ATTRIBUTE_UNUSED;
773{
774  return (GET_CODE (op) == CONST_INT
775	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
776}
777
778/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
779
780int
781non_short_cint_operand (op, mode)
782     rtx op;
783     enum machine_mode mode ATTRIBUTE_UNUSED;
784{
785  return (GET_CODE (op) == CONST_INT
786	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
787}
788
789/* Returns 1 if OP is a CONST_INT that is a positive value
790   and an exact power of 2.  */
791
792int
793exact_log2_cint_operand (op, mode)
794     rtx op;
795     enum machine_mode mode ATTRIBUTE_UNUSED;
796{
797  return (GET_CODE (op) == CONST_INT
798	  && INTVAL (op) > 0
799	  && exact_log2 (INTVAL (op)) >= 0);
800}
801
802/* Returns 1 if OP is a register that is not special (i.e., not MQ,
803   ctr, or lr).  */
804
int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept pseudos and hard registers other than the special ones.
     NOTE(review): this relies on the port's register numbering
     placing MQ/LR/CTR between the GPR+FPR block and ARG_POINTER --
     confirm against the REGISTER_NAMES layout in rs6000.h.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      /* Registers from the argument pointer up (pseudos, CRs,
		 AltiVec), except the XER...  */
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      /* ...or anything below MQ, i.e. the GPRs and FPRs.  */
	      || REGNO (op) < MQ_REGNO));
}
816
817/* Returns 1 if OP is either a pseudo-register or a register denoting a
818   CR field.  */
819
820int
821cc_reg_operand (op, mode)
822     rtx op;
823     enum machine_mode mode;
824{
825  return (register_operand (op, mode)
826	  && (GET_CODE (op) != REG
827	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
828	      || CR_REGNO_P (REGNO (op))));
829}
830
831/* Returns 1 if OP is either a pseudo-register or a register denoting a
832   CR field that isn't CR0.  */
833
834int
835cc_reg_not_cr0_operand (op, mode)
836     rtx op;
837     enum machine_mode mode;
838{
839  return (register_operand (op, mode)
840	  && (GET_CODE (op) != REG
841	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
842	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
843}
844
845/* Returns 1 if OP is either a constant integer valid for a D-field or
846   a non-special register.  If a register, it must be in the proper
847   mode unless MODE is VOIDmode.  */
848
849int
850reg_or_short_operand (op, mode)
851      rtx op;
852      enum machine_mode mode;
853{
854  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
855}
856
857/* Similar, except check if the negation of the constant would be
858   valid for a D-field.  */
859
860int
861reg_or_neg_short_operand (op, mode)
862      rtx op;
863      enum machine_mode mode;
864{
865  if (GET_CODE (op) == CONST_INT)
866    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867
868  return gpc_reg_operand (op, mode);
869}
870
871/* Returns 1 if OP is either a constant integer valid for a DS-field or
872   a non-special register.  If a register, it must be in the proper
873   mode unless MODE is VOIDmode.  */
874
875int
876reg_or_aligned_short_operand (op, mode)
877      rtx op;
878      enum machine_mode mode;
879{
880  if (gpc_reg_operand (op, mode))
881    return 1;
882  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
883    return 1;
884
885  return 0;
886}
887
888
889/* Return 1 if the operand is either a register or an integer whose
890   high-order 16 bits are zero.  */
891
892int
893reg_or_u_short_operand (op, mode)
894     rtx op;
895     enum machine_mode mode;
896{
897  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
898}
899
/* Return 1 if the operand is either a non-special register or ANY
901   constant integer.  */
902
903int
904reg_or_cint_operand (op, mode)
905    rtx op;
906    enum machine_mode mode;
907{
908  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
909}
910
/* Return 1 if the operand is either a non-special register or ANY
912   32-bit signed constant integer.  */
913
int
reg_or_arith_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  /* On a 32-bit host every CONST_INT already fits in 32 bits, so the
     range check is compiled in only when HOST_WIDE_INT is wider.  The
     bias by 0x80000000 maps the signed 32-bit range onto
     [0, 0x100000000).  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
927
/* Return 1 if the operand is either a non-special register or a 32-bit
929   signed constant integer valid for 64-bit addition.  */
930
int
reg_or_add_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  /* Accept a constant below 0x7fff8000 and, on hosts wider than 32
     bits, within the window biased by 0x80008000 -- presumably the
     range splittable into a 16-bit high and low part for addition
     (TODO confirm against the adddi3 patterns).  On a 32-bit host the
     CONST_INT cannot exceed 32 bits, so only the first test applies.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && INTVAL (op) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
945
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */
948
int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  /* Same range test as reg_or_add_cint64_operand, applied to the
     negated constant, since subtracting OP is adding -OP.
     NOTE(review): the negation `- INTVAL (op)' invokes signed-overflow
     undefined behavior when INTVAL (op) is the most negative
     HOST_WIDE_INT -- confirm whether callers can pass that value.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
963
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */
966
int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* Sanity check: the only way MODE can be wider than a
	     HOST_WIDE_INT is on a host whose wide int is at most
	     32 bits.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends ones into the mode's
	     (unrepresented) high bits, so it is not a 32-bit
	     unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Within the mode, every bit above the low 32 must be zero.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only appears when the value does not
	 fit in one HOST_WIDE_INT, i.e. DImode on a 32-bit host.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Unsigned 32-bit means the high host word is zero.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
997
998/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
999
1000int
1001got_operand (op, mode)
1002     rtx op;
1003     enum machine_mode mode ATTRIBUTE_UNUSED;
1004{
1005  return (GET_CODE (op) == SYMBOL_REF
1006	  || GET_CODE (op) == CONST
1007	  || GET_CODE (op) == LABEL_REF);
1008}
1009
/* Return 1 if the operand is a simple reference that can be loaded via
   the GOT (labels involving addition aren't allowed).  */
1012
1013int
1014got_no_const_operand (op, mode)
1015     rtx op;
1016     enum machine_mode mode ATTRIBUTE_UNUSED;
1017{
1018  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1019}
1020
1021/* Return the number of instructions it takes to form a constant in an
1022   integer register.  */
1023
static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into 32-bit halves; LOW is sign extended so that
	 (HIGH << 32) + LOW reconstructs VALUE exactly.  */
      HOST_WIDE_INT low  = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;  /* sign extend */

      /* A value that sign-extends from 32 bits loads in two insns
	 (e.g. lis + ori).  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Low half zero: build the high half, plus one insn to shift it
	 into place.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      /* General case: build each half, plus one insn to combine.  */
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* Any remaining 32-bit value takes an addis/ori style pair.  */
  else
    return 2;
}
1062
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value that is not a sign-extended 32-bit quantity may still
	 be loadable in two insns if it is a valid 64-bit mask
	 (li/lis followed by rldic-style rotate).  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Cost of loading the single-precision image as an integer.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integral CONST_DOUBLEs carry their value directly; FP ones
	 must first be converted to the target's bit image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      /* In 32-bit mode the two words load independently.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit mode: same case analysis as
	     num_insns_constant_wide, with the value split across the
	     two host words.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1137
1138/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1139   register with one instruction per word.  We only do this if we can
1140   safely read CONST_DOUBLE_{LOW,HIGH}.  */
1141
int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Only CONST_DOUBLEs of a floating mode (or DImode) are candidates;
     the mode on the rtx must agree with the requested mode.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if (TARGET_SOFT_FLOAT && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      /* Easy if each 32-bit word of the target image loads in a
	 single instruction.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* Easy if the single 32-bit image loads in one instruction.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* Easy when the low word is zero on a 64-bit target, or when the
       whole value loads in at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1200
/* Return 1 if the operand is a CONST_VECTOR whose value can be put
   into a register without effort (currently only all-zeros).  */
1203
1204static int
1205easy_vector_constant (op)
1206     rtx op;
1207{
1208  rtx elt;
1209  int units, i;
1210
1211  if (GET_CODE (op) != CONST_VECTOR)
1212    return 0;
1213
1214  units = CONST_VECTOR_NUNITS (op);
1215
1216  /* We can generate 0 easily.  Look for that.  */
1217  for (i = 0; i < units; ++i)
1218    {
1219      elt = CONST_VECTOR_ELT (op, i);
1220
1221      /* We could probably simplify this by just checking for equality
1222	 with CONST0_RTX for the current mode, but let's be safe
1223	 instead.  */
1224
1225      switch (GET_CODE (elt))
1226	{
1227	case CONST_INT:
1228	  if (INTVAL (elt) != 0)
1229	    return 0;
1230	  break;
1231	case CONST_DOUBLE:
1232	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1233	    return 0;
1234	  break;
1235	default:
1236	  return 0;
1237	}
1238    }
1239
1240  /* We could probably generate a few other constants trivially, but
1241     gcc doesn't generate them yet.  FIXME later.  */
1242  return 1;
1243}
1244
1245/* Return 1 if the operand is the constant 0.  This works for scalars
1246   as well as vectors.  */
1247int
1248zero_constant (op, mode)
1249     rtx op;
1250     enum machine_mode mode;
1251{
1252  return op == CONST0_RTX (mode);
1253}
1254
1255/* Return 1 if the operand is 0.0.  */
1256int
1257zero_fp_constant (op, mode)
1258     rtx op;
1259     enum machine_mode mode;
1260{
1261  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1262}
1263
1264/* Return 1 if the operand is in volatile memory.  Note that during
1265   the RTL generation phase, memory_operand does not return TRUE for
1266   volatile memory references.  So this function allows us to
   recognize volatile references where it's safe.  */
1268
1269int
1270volatile_mem_operand (op, mode)
1271     rtx op;
1272     enum machine_mode mode;
1273{
1274  if (GET_CODE (op) != MEM)
1275    return 0;
1276
1277  if (!MEM_VOLATILE_P (op))
1278    return 0;
1279
1280  if (mode != GET_MODE (op))
1281    return 0;
1282
1283  if (reload_completed)
1284    return memory_operand (op, mode);
1285
1286  if (reload_in_progress)
1287    return strict_memory_address_p (mode, XEXP (op, 0));
1288
1289  return memory_address_p (mode, XEXP (op, 0));
1290}
1291
1292/* Return 1 if the operand is an offsettable memory operand.  */
1293
1294int
1295offsettable_mem_operand (op, mode)
1296     rtx op;
1297     enum machine_mode mode;
1298{
1299  return ((GET_CODE (op) == MEM)
1300	  && offsettable_address_p (reload_completed || reload_in_progress,
1301				    mode, XEXP (op, 0)));
1302}
1303
1304/* Return 1 if the operand is either an easy FP constant (see above) or
1305   memory.  */
1306
1307int
1308mem_or_easy_const_operand (op, mode)
1309     rtx op;
1310     enum machine_mode mode;
1311{
1312  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1313}
1314
1315/* Return 1 if the operand is either a non-special register or an item
1316   that can be used as the operand of a `mode' add insn.  */
1317
1318int
1319add_operand (op, mode)
1320    rtx op;
1321    enum machine_mode mode;
1322{
1323  if (GET_CODE (op) == CONST_INT)
1324    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1325	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1326
1327  return gpc_reg_operand (op, mode);
1328}
1329
1330/* Return 1 if OP is a constant but not a valid add_operand.  */
1331
1332int
1333non_add_cint_operand (op, mode)
1334     rtx op;
1335     enum machine_mode mode ATTRIBUTE_UNUSED;
1336{
1337  return (GET_CODE (op) == CONST_INT
1338	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1339	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1340}
1341
1342/* Return 1 if the operand is a non-special register or a constant that
1343   can be used as the operand of an OR or XOR insn on the RS/6000.  */
1344
int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a CONST_INT wider than HOST_WIDE_INT is
	 implicitly sign-extended, so a negative value has ones in
	 the (unrepresented) high bits and cannot be a 16-bit
	 immediate.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLE only occurs for modes wider than the
	 host word; its high word must be zero.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Match if the value fits the unshifted 16-bit immediate (andi./
     ori/xori) or the shifted one (andis./oris/xoris).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1380
1381/* Return 1 if C is a constant that is not a logical operand (as
1382   above), but could be split into one.  */
1383
1384int
1385non_logical_cint_operand (op, mode)
1386     rtx op;
1387     enum machine_mode mode;
1388{
1389  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1390	  && ! logical_operand (op, mode)
1391	  && reg_or_logical_cint_operand (op, mode));
1392}
1393
1394/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1395   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
1396   Reject all ones and all zeros, since these should have been optimized
1397   away and confuse the making of MB and ME.  */
1398
int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  (-lsb has all bits at and above LSB set,
     so this clears everything below the first transition.)  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1440
1441/* Return 1 if the operand is a constant that is a PowerPC64 mask.
1442   It is if there are no more than one 1->0 or 0->1 transitions.
1443   Reject all ones and all zeros, since these should have been optimized
1444   away and confuse the making of MB and ME.  */
1445
int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      /* A 64-bit value split across two host words; this branch is
	 only reachable with a 32-bit HOST_WIDE_INT (HIGH is read only
	 under that condition).  */
      HOST_WIDE_INT low, high, lsb;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  /* Normalize so the LS bit is zero, as above.  */
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* With a 64-bit host word the whole value is in LOW, so
	     low == 0 means all zeros -- rejected.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  /* The single transition must lie in the high word.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition in the low word; the high word must then be all
	 ones for a single-transition mask.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1500
1501/* Return 1 if the operand is either a non-special register or a constant
1502   that can be used as the operand of a PowerPC64 logical AND insn.  */
1503
1504int
1505and64_operand (op, mode)
1506    rtx op;
1507    enum machine_mode mode;
1508{
1509  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1510    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1511
1512  return (logical_operand (op, mode) || mask64_operand (op, mode));
1513}
1514
1515/* Return 1 if the operand is either a non-special register or a
1516   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1517
1518int
1519and_operand (op, mode)
1520    rtx op;
1521    enum machine_mode mode;
1522{
1523  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1524    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1525
1526  return (logical_operand (op, mode) || mask_operand (op, mode));
1527}
1528
1529/* Return 1 if the operand is a general register or memory operand.  */
1530
1531int
1532reg_or_mem_operand (op, mode)
1533     rtx op;
1534     enum machine_mode mode;
1535{
1536  return (gpc_reg_operand (op, mode)
1537	  || memory_operand (op, mode)
1538	  || volatile_mem_operand (op, mode));
1539}
1540
1541/* Return 1 if the operand is a general register or memory operand without
1542   pre_inc or pre_dec which produces invalid form of PowerPC lwa
1543   instruction.  */
1544
int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the underlying register.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa is a DS-form instruction: it has no update (pre-increment/
     pre-decrement) forms, and any constant displacement must be a
     multiple of 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1563
1564/* Return 1 if the operand, used inside a MEM, is a valid first argument
1565   to CALL.  This is a SYMBOL_REF or a pseudo-register, which will be
1566   forced to lr.  */
1567
1568int
1569call_operand (op, mode)
1570     rtx op;
1571     enum machine_mode mode;
1572{
1573  if (mode != VOIDmode && GET_MODE (op) != mode)
1574    return 0;
1575
1576  return (GET_CODE (op) == SYMBOL_REF
1577	  || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1578}
1579
1580/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1581   this file and the function is not weakly defined.  */
1582
1583int
1584current_file_function_operand (op, mode)
1585     rtx op;
1586     enum machine_mode mode ATTRIBUTE_UNUSED;
1587{
1588  return (GET_CODE (op) == SYMBOL_REF
1589	  && (SYMBOL_REF_FLAG (op)
1590	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
1591	          && ! DECL_WEAK (current_function_decl))));
1592}
1593
1594/* Return 1 if this operand is a valid input for a move insn.  */
1595
int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  (Hard ones are split later.)  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
1650
1651/* Return 1 for an operand in small memory on V.4/eabi.  */
1652
int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small data area, or explicit-section-only sdata: never match.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) is a
     candidate.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Presumably small-data symbols are tagged with a leading '@' by
     the section-info encoding -- verify against ENCODE_SECTION_INFO
     in the ELF target headers.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1699
/* Recursively walk OP, setting *HAVE_SYM when a TOC-worthy constant
   pool symbol is seen and *HAVE_TOC when the TOC base label is seen.
   Returns nonzero iff every leaf of OP is one of those or a
   CONST_INT, combined only with PLUS/MINUS/CONST.  */

static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only count pool symbols whose constant would actually be
	     placed in the TOC.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  /* A reference to the TOC base label itself.  */
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
1738
1739int
1740constant_pool_expr_p (op)
1741    rtx op;
1742{
1743  int have_sym = 0;
1744  int have_toc = 0;
1745  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1746}
1747
1748int
1749toc_relative_expr_p (op)
1750    rtx op;
1751{
1752    int have_sym = 0;
1753    int have_toc = 0;
1754    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1755}
1756
1757/* Try machine-dependent ways of modifying an illegitimate address
1758   to be legitimate.  If we find one, return the new, valid address.
1759   This is used from only one place: `memory_address' in explow.c.
1760
1761   OLDX is the address as it was before break_out_memory_refs was
1762   called.  In some cases it is useful to look at this to decide what
1763   needs to be done.
1764
1765   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1766
1767   It is always safe for this function to do nothing.  It exists to
1768   recognize opportunities to optimize the output.
1769
1770   On RS/6000, first check for the sum of a register with a constant
1771   integer that is out of range.  If so, generate code to add the
1772   constant with the low-order 16 bits masked to the register and force
1773   this result into another register (this can be done with `cau').
1774   Then generate an address of REG+(CONST&0xffff), allowing for the
1775   possibility of bit 16 being a one.
1776
1777   Then check for the sum of a register and something not constant, try to
1778   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split into reg + high part (forced
     into a register) plus a 16-bit signed low part.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If the low part would sign-extend negative, compensate by
	 bumping the high part.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so the sum
     becomes an indexed address.  Excluded modes are those that cannot
     use indexed addressing here (multi-register accesses).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec addresses: only reg or reg+reg forms are valid.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without a TOC, non-PIC: address a constant via its high part
     in a register and a lo_sum for the rest.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Darwin equivalent of the previous case.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* A constant that will live in the TOC: rewrite as a TOC load.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
1857
1858/* The convention appears to be to define this wherever it is used.
1859   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1860   is now used here.  */
1861#ifndef REG_MODE_OK_FOR_BASE_P
1862#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1863#endif
1864
1865/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
1866   replace the input X, or the original X if no replacement is called for.
1867   The output parameter *WIN is 1 if the calling macro should goto WIN,
1868   0 if it should not.
1869
1870   For RS/6000, we wish to handle large displacements off a base
1871   register by splitting the addend across an addiu/addis and the mem insn.
1872   This cuts number of extra insns needed from 3 to 1.
1873
1874   On Darwin, we use this to generate code for floating point constants.
1875   A movsf_low is generated so we wind up with 2 instructions rather than 3.
1876   The Darwin code is inside #if TARGET_MACHO because only then is
1877   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves:
     (plus (plus (reg) (high-const)) (low-const)) -- reload the inner
     sum into a base register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Hard-register base plus large constant: split the displacement
     into a high part (reloaded into the base) and a 16-bit signed low
     part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* A TOC-worthy constant pool reference can be rewritten as a load
     relative to the TOC register without any reload.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
1987
1988/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1989   that is a valid memory address for an instruction.
1990   The MODE argument is the machine mode for the MEM expression
1991   that wants to use this address.
1992
1993   On the RS/6000, there are four valid address: a SYMBOL_REF that
1994   refers to a constant pool entry of an address (or the sum of it
1995   plus a constant), a short (16-bit signed) constant plus a register,
1996   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
1998   we must ensure that both words are addressable or PowerPC64 with offset
1999   word aligned.
2000
2001   For modes spanning multiple registers (DFmode in 32-bit GPRs,
2002   32-bit DImode, TImode), indexed addressing cannot be used because
2003   adjacent memory cells are accessed by adding word-sized offsets
2004   during assembly output.  */
2005int
2006rs6000_legitimate_address (mode, x, reg_ok_strict)
2007    enum machine_mode mode;
2008    rtx x;
2009    int reg_ok_strict;
2010{
2011  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2012    return 1;
2013  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2014      && !ALTIVEC_VECTOR_MODE (mode)
2015      && TARGET_UPDATE
2016      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2017    return 1;
2018  if (LEGITIMATE_SMALL_DATA_P (mode, x))
2019    return 1;
2020  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2021    return 1;
2022  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
2023  if (! reg_ok_strict
2024      && GET_CODE (x) == PLUS
2025      && GET_CODE (XEXP (x, 0)) == REG
2026      && XEXP (x, 0) == virtual_stack_vars_rtx
2027      && GET_CODE (XEXP (x, 1)) == CONST_INT)
2028    return 1;
2029  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2030    return 1;
2031  if (mode != TImode
2032      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2033      && (TARGET_POWERPC64 || mode != DImode)
2034      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2035    return 1;
2036  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2037    return 1;
2038  return 0;
2039}
2040
2041/* Try to output insns to set TARGET equal to the constant C if it can
2042   be done in less than N insns.  Do all computations in MODE.
2043   Returns the place where the output has been placed if it can be
2044   done and the insns have been emitted.  If it would take more than N
2045   insns, zero is returned and no insns and emitted.  */
2046
2047rtx
2048rs6000_emit_set_const (dest, mode, source, n)
2049     rtx dest, source;
2050     enum machine_mode mode;
2051     int n ATTRIBUTE_UNUSED;
2052{
2053  HOST_WIDE_INT c0, c1;
2054
2055  if (mode == QImode || mode == HImode || mode == SImode)
2056    {
2057      if (dest == NULL)
2058        dest = gen_reg_rtx (mode);
2059      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2060      return dest;
2061    }
2062
2063  if (GET_CODE (source) == CONST_INT)
2064    {
2065      c0 = INTVAL (source);
2066      c1 = -(c0 < 0);
2067    }
2068  else if (GET_CODE (source) == CONST_DOUBLE)
2069    {
2070#if HOST_BITS_PER_WIDE_INT >= 64
2071      c0 = CONST_DOUBLE_LOW (source);
2072      c1 = -(c0 < 0);
2073#else
2074      c0 = CONST_DOUBLE_LOW (source);
2075      c1 = CONST_DOUBLE_HIGH (source);
2076#endif
2077    }
2078  else
2079    abort ();
2080
2081  return rs6000_emit_set_long_const (dest, c0, c1);
2082}
2083
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   DEST receives the 64-bit constant whose low host word is C1 and
   high host word is C2 (on a >=64-bit host, C2 is recomputed from C1
   below).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      /* 32-bit target: store the two halves into the two subwords of
	 DEST; subword order depends on endianness.  */
      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the constant into four 16-bit chunks, ud1 being the
	 least significant.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one move.
	 (udN ^ 0x8000) - 0x8000 sign-extends a 16-bit chunk.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: load the upper 16
	 bits, then OR in the low 16 if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Value is a sign-extended 48-bit constant: build the top 32
	 bits, shift left 16, then OR in the low chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: build the high doubleword half, shift it
	     into place, then OR in each nonzero low chunk.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2172
/* Emit a move from SOURCE to DEST in mode MODE.  This may emit more
   than one insn: constants can be forced into the constant pool or
   TOC, and some addresses must be legitimized first.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose value fits a CONST_INT should have
     been one; abort rather than emit a wrong constant.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If the doubleword access would be slow but two word accesses are
     not, split the mem-to-mem DImode move into two SImode moves.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* If the destination is not a register and we may still create
     pseudos, copy the source through a register first.  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  /* Storing SFmode on old POWER (no PowerPC) hard float: a register
     source may hold double-precision data.  */
  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!VECTOR_MODE_P (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants go to the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that can't be synthesized cheaply go to the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* 32-bit SVR4 PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and no PIC: build the constant with a high/low pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant landed in the TOC, reference it
	     TOC-relatively and mark the memory as constant.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands need a plain register address.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2470
2471/* Initialize a variable CUM of type CUMULATIVE_ARGS
2472   for a call to a function whose data type is FNTYPE.
2473   For a library call, FNTYPE is 0.
2474
2475   For incoming args we set the number of arguments in the prototype large
2476   so we never return a PARALLEL.  */
2477
2478void
2479init_cumulative_args (cum, fntype, libname, incoming)
2480     CUMULATIVE_ARGS *cum;
2481     tree fntype;
2482     rtx libname ATTRIBUTE_UNUSED;
2483     int incoming;
2484{
2485  static CUMULATIVE_ARGS zero_cumulative;
2486
2487  *cum = zero_cumulative;
2488  cum->words = 0;
2489  cum->fregno = FP_ARG_MIN_REG;
2490  cum->vregno = ALTIVEC_ARG_MIN_REG;
2491  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2492  cum->call_cookie = CALL_NORMAL;
2493  cum->sysv_gregno = GP_ARG_MIN_REG;
2494
2495  if (incoming)
2496    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2497
2498  else if (cum->prototype)
2499    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2500			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2501			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2502
2503  else
2504    cum->nargs_prototype = 0;
2505
2506  cum->orig_nargs = cum->nargs_prototype;
2507
2508  /* Check for longcall's */
2509  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2510    cum->call_cookie = CALL_LONG;
2511
2512  if (TARGET_DEBUG_ARG)
2513    {
2514      fprintf (stderr, "\ninit_cumulative_args:");
2515      if (fntype)
2516	{
2517	  tree ret_type = TREE_TYPE (fntype);
2518	  fprintf (stderr, " ret code = %s,",
2519		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2520	}
2521
2522      if (cum->call_cookie & CALL_LONG)
2523	fprintf (stderr, " longcall,");
2524
2525      fprintf (stderr, " proto = %d, nargs = %d\n",
2526	       cum->prototype, cum->nargs_prototype);
2527    }
2528}
2529
2530/* If defined, a C expression which determines whether, and in which
2531   direction, to pad out an argument with extra space.  The value
2532   should be of type `enum direction': either `upward' to pad above
2533   the argument, `downward' to pad below, or `none' to inhibit
2534   padding.
2535
2536   For the AIX ABI structs are always stored left shifted in their
2537   argument slot.  */
2538
2539enum direction
2540function_arg_padding (mode, type)
2541     enum machine_mode mode;
2542     tree type;
2543{
2544  if (type != 0 && AGGREGATE_TYPE_P (type))
2545    return upward;
2546
2547  /* This is the default definition.  */
2548  return (! BYTES_BIG_ENDIAN
2549          ? upward
2550          : ((mode == BLKmode
2551              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2552                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2553              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2554             ? downward : upward));
2555}
2556
2557/* If defined, a C expression that gives the alignment boundary, in bits,
2558   of an argument with the specified mode and type.  If it is not defined,
2559   PARM_BOUNDARY is used for all arguments.
2560
2561   V.4 wants long longs to be double word aligned.  */
2562
2563int
2564function_arg_boundary (mode, type)
2565     enum machine_mode mode;
2566     tree type ATTRIBUTE_UNUSED;
2567{
2568  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2569    return 64;
2570  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2571    return 128;
2572  else
2573    return PARM_BOUNDARY;
2574}
2575
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)

   Depending on the ABI, this bumps the vector, FP, or GP register
   counter, and/or the count of stack words consumed.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  /* AltiVec vector args consume a vector register while one remains
     (and the prototype is not exhausted); otherwise stack words.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* Out of FP registers: the value goes on the stack,
		 doubleword aligned if DFmode ("+= words & 1" rounds
		 the word count up to even).  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: words accumulate linearly; doubleword-aligned
	 args skip an odd slot on 32-bit targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2671
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when TYPE is zero) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* AltiVec vectors: a vector register while one remains, else the
     stack (NULL).  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin: GPR slot is the number of words consumed so far,
	 with doubleword-aligned args skipping an odd slot on 32-bit
	 targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types go on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* No usable prototype: pass the value in the FP register and
	     also in the GPRs/stack, since the callee's expectations are
	     unknown (see the function comment above).  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2811
2812/* For an arg passed partly in registers and partly in memory,
2813   this is the number of registers used.
2814   For args passed entirely in registers or entirely in memory, zero.  */
2815
2816int
2817function_arg_partial_nregs (cum, mode, type, named)
2818     CUMULATIVE_ARGS *cum;
2819     enum machine_mode mode;
2820     tree type;
2821     int named ATTRIBUTE_UNUSED;
2822{
2823  if (DEFAULT_ABI == ABI_V4)
2824    return 0;
2825
2826  if (USE_FP_FOR_ARG_P (*cum, mode, type)
2827      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2828    {
2829      if (cum->nargs_prototype >= 0)
2830	return 0;
2831    }
2832
2833  if (cum->words < GP_ARG_NUM_REG
2834      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2835    {
2836      int ret = GP_ARG_NUM_REG - cum->words;
2837      if (ret && TARGET_DEBUG_ARG)
2838	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2839
2840      return ret;
2841    }
2842
2843  return 0;
2844}
2845
2846/* A C expression that indicates when an argument must be passed by
2847   reference.  If nonzero for an argument, a copy of that argument is
2848   made in memory and a pointer to the argument is passed instead of
2849   the argument itself.  The pointer is passed in whatever way is
2850   appropriate for passing a pointer to that type.
2851
2852   Under V.4, structures and unions are passed by reference.  */
2853
2854int
2855function_arg_pass_by_reference (cum, mode, type, named)
2856     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2857     enum machine_mode mode ATTRIBUTE_UNUSED;
2858     tree type;
2859     int named ATTRIBUTE_UNUSED;
2860{
2861  if (DEFAULT_ABI == ABI_V4
2862      && ((type && AGGREGATE_TYPE_P (type))
2863	  || mode == TFmode))
2864    {
2865      if (TARGET_DEBUG_ARG)
2866	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2867
2868      return 1;
2869    }
2870
2871  return 0;
2872}
2873
/* Perform any actions needed for a function that is receiving a
   variable number of arguments.
2876
2877   CUM is as above.
2878
2879   MODE and TYPE are the mode and type of the current parameter.
2880
2881   PRETEND_SIZE is a variable that should be set to the amount of stack
2882   that must be pushed by the prolog to pretend that our caller pushed
2883   it.
2884
2885   Normally, this macro will push all remaining incoming registers on the
2886   stack and set PRETEND_SIZE to the length of the registers pushed.  */
2887
2888void
2889setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2890     CUMULATIVE_ARGS *cum;
2891     enum machine_mode mode;
2892     tree type;
2893     int *pretend_size;
2894     int no_rtl;
2895
2896{
2897  CUMULATIVE_ARGS next_cum;
2898  int reg_size = TARGET_32BIT ? 4 : 8;
2899  rtx save_area = NULL_RTX, mem;
2900  int first_reg_offset, set;
2901  tree fntype;
2902  int stdarg_p;
2903
2904  fntype = TREE_TYPE (current_function_decl);
2905  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2906	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2907		  != void_type_node));
2908
2909  /* For varargs, we do not want to skip the dummy va_dcl argument.
2910     For stdargs, we do want to skip the last named argument.  */
2911  next_cum = *cum;
2912  if (stdarg_p)
2913    function_arg_advance (&next_cum, mode, type, 1);
2914
2915  if (DEFAULT_ABI == ABI_V4)
2916    {
2917      /* Indicate to allocate space on the stack for varargs save area.  */
2918      /* ??? Does this really have to be located at a magic spot on the
2919	 stack, or can we allocate this with assign_stack_local instead.  */
2920      cfun->machine->sysv_varargs_p = 1;
2921      if (! no_rtl)
2922	save_area = plus_constant (virtual_stack_vars_rtx,
2923				   - RS6000_VARARGS_SIZE);
2924
2925      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2926    }
2927  else
2928    {
2929      first_reg_offset = next_cum.words;
2930      save_area = virtual_incoming_args_rtx;
2931      cfun->machine->sysv_varargs_p = 0;
2932
2933      if (MUST_PASS_IN_STACK (mode, type))
2934	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2935    }
2936
2937  set = get_varargs_alias_set ();
2938  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2939    {
2940      mem = gen_rtx_MEM (BLKmode,
2941		         plus_constant (save_area,
2942					first_reg_offset * reg_size)),
2943      set_mem_alias_set (mem, set);
2944      set_mem_align (mem, BITS_PER_WORD);
2945
2946      move_block_from_reg
2947	(GP_ARG_MIN_REG + first_reg_offset, mem,
2948	 GP_ARG_NUM_REG - first_reg_offset,
2949	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2950
2951      /* ??? Does ABI_V4 need this at all?  */
2952      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2953    }
2954
2955  /* Save FP registers if needed.  */
2956  if (DEFAULT_ABI == ABI_V4
2957      && TARGET_HARD_FLOAT && ! no_rtl
2958      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2959    {
2960      int fregno = next_cum.fregno;
2961      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2962      rtx lab = gen_label_rtx ();
2963      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2964
2965      emit_jump_insn (gen_rtx_SET (VOIDmode,
2966				   pc_rtx,
2967				   gen_rtx_IF_THEN_ELSE (VOIDmode,
2968					    gen_rtx_NE (VOIDmode, cr1,
2969						        const0_rtx),
2970					    gen_rtx_LABEL_REF (VOIDmode, lab),
2971					    pc_rtx)));
2972
2973      while (fregno <= FP_ARG_V4_MAX_REG)
2974	{
2975	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2976          set_mem_alias_set (mem, set);
2977	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2978	  fregno++;
2979	  off += 8;
2980	}
2981
2982      emit_label (lab);
2983    }
2984}
2985
2986/* Create the va_list data type.  */
2987
2988tree
2989rs6000_build_va_list ()
2990{
2991  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2992
2993  /* For AIX, prefer 'char *' because that's what the system
2994     header files like.  */
2995  if (DEFAULT_ABI != ABI_V4)
2996    return build_pointer_type (char_type_node);
2997
2998  record = make_lang_type (RECORD_TYPE);
2999  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3000
3001  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3002		      unsigned_char_type_node);
3003  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3004		      unsigned_char_type_node);
3005  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3006		      ptr_type_node);
3007  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3008		      ptr_type_node);
3009
3010  DECL_FIELD_CONTEXT (f_gpr) = record;
3011  DECL_FIELD_CONTEXT (f_fpr) = record;
3012  DECL_FIELD_CONTEXT (f_ovf) = record;
3013  DECL_FIELD_CONTEXT (f_sav) = record;
3014
3015  TREE_CHAIN (record) = type_decl;
3016  TYPE_NAME (record) = type_decl;
3017  TYPE_FIELDS (record) = f_gpr;
3018  TREE_CHAIN (f_gpr) = f_fpr;
3019  TREE_CHAIN (f_fpr) = f_ovf;
3020  TREE_CHAIN (f_ovf) = f_sav;
3021
3022  layout_type (record);
3023
3024  /* The correct type is an array type of one element.  */
3025  return build_array_type (record, build_index_type (size_zero_node));
3026}
3027
3028/* Implement va_start.  */
3029
void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* Pick apart the four fields of the va_list record built by
     rs6000_build_va_list (gpr, fpr, overflow_arg_area, reg_save_area).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build lvalue trees for each field of *valist.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Store the counts of consumed GP and FP registers.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming args pointer plus however many
     words of named arguments were passed on the stack.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area, placed RS6000_VARARGS_SIZE bytes below
     the frame pointer (matching setup_incoming_varargs above).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3099
3100/* Implement va_arg.  */
3101
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Pick apart the va_list record fields (see rs6000_build_va_list).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build lvalue trees for each field of *valist.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size of the argument in bytes and in words.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter to use, the offset
     and scale locating its slot in the register save area, and whether
     it is passed by reference.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* Vectors never go in registers.  */
  if (TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register counter is already at or past 8 - n_reg + 1,
	 the argument did not fit in registers: take the overflow path.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Compute save_area + sav_ofs + reg * sav_scale, post-incrementing
	 the register counter by the n_reg registers consumed.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      /* Register case done; skip the overflow-area code.  */
      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* Vectors are 16 byte aligned.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up to the required alignment.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, what we computed is the address of a
     pointer to the value; load through it to get the real address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3269
3270/* Builtins.  */
3271
/* Register builtin function NAME with type TYPE and code CODE, but
   only when the target flags in MASK are enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
do {									\
  if ((MASK) & target_flags)						\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);	\
} while (0)
3277
/* One entry per builtin: the target-flag mask that enables it, the insn
   pattern used to expand it, its source-level name, and its enum code.  */
struct builtin_description
{
  const unsigned int mask;		/* Target flags required (e.g. MASK_ALTIVEC).  */
  const enum insn_code icode;		/* Insn pattern used for expansion.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum rs6000_builtins code;	/* Enumerator identifying the builtin.  */
};
3285
3286/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3287
/* Each entry maps a three-operand AltiVec builtin to its insn pattern.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3314
3315/* DST operations: void foo (void *, const int, const char).  */
3316
/* Data-stream touch builtins (dst/dstt/dstst/dststt cache hints).  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3324
3325/* Simple binary operations: VECc = foo (VECa, VECb).  */
3326
3327static const struct builtin_description bdesc_2arg[] =
3328{
3329  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3330  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3331  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3332  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3333  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3334  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3335  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3336  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3337  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3338  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3339  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3340  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3341  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3342  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3343  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3344  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3345  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3346  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3347  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3348  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3349  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3350  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3351  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3352  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3353  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3354  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3355  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3356  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3357  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3358  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3359  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3360  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3361  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3362  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3363  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3364  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3365  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3366  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3367  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3368  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3369  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3370  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3371  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3372  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3373  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3374  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3375  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3376  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3377  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3378  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3379  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3380  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3381  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3382  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3383  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3384  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3385  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3386  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3387  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3388  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3389  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3390  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3391  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3392  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3393  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3394  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3395  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3396  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3397  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3398  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3399  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3400  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3401  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3402  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3403  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3404  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3405  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3406  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3407  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3408  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3409  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3410  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3411  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3412  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3413  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3414  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3415  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3416  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3417  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3418  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3419  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3420  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3421  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3422  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3423  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3424  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3425  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3426  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3427  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3428  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3429  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3430  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3431  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3432  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3433  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3434  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3435  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3436  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3437  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3438  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3439  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3440  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3441  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3442};
3443
3444/* AltiVec predicates.  */
3445
/* Like struct builtin_description, but with an extra opcode string for
   the comparison insn underlying the predicate.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* Target flags required.  */
  const enum insn_code icode;		/* Predicate expansion pattern.  */
  const char *opcode;			/* Name of the underlying compare insn.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum rs6000_builtins code;	/* Enumerator identifying the builtin.  */
};
3454
/* Table of the AltiVec vcmp*_p comparison-predicate builtins.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
3471
/* ABS* operations.  */
3473
/* Vector absolute value (plain and saturating) builtins.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
3484
3485/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3486   foo (VECa).  */
3487
/* Each entry maps a one-operand AltiVec builtin to its insn pattern.  */
static const struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3508
3509static rtx
3510altivec_expand_unop_builtin (icode, arglist, target)
3511     enum insn_code icode;
3512     tree arglist;
3513     rtx target;
3514{
3515  rtx pat;
3516  tree arg0 = TREE_VALUE (arglist);
3517  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3518  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3519  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3520
3521  /* If we got invalid arguments bail out before generating bad rtl.  */
3522  if (arg0 == error_mark_node)
3523    return NULL_RTX;
3524
3525  if (target == 0
3526      || GET_MODE (target) != tmode
3527      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3528    target = gen_reg_rtx (tmode);
3529
3530  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3531    op0 = copy_to_mode_reg (mode0, op0);
3532
3533  pat = GEN_FCN (icode) (target, op0);
3534  if (! pat)
3535    return 0;
3536  emit_insn (pat);
3537
3538  return target;
3539}
3540
3541static rtx
3542altivec_expand_abs_builtin (icode, arglist, target)
3543     enum insn_code icode;
3544     tree arglist;
3545     rtx target;
3546{
3547  rtx pat, scratch1, scratch2;
3548  tree arg0 = TREE_VALUE (arglist);
3549  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3550  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3551  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3552
3553  /* If we have invalid arguments, bail out before generating bad rtl.  */
3554  if (arg0 == error_mark_node)
3555    return NULL_RTX;
3556
3557  if (target == 0
3558      || GET_MODE (target) != tmode
3559      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3560    target = gen_reg_rtx (tmode);
3561
3562  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3563    op0 = copy_to_mode_reg (mode0, op0);
3564
3565  scratch1 = gen_reg_rtx (mode0);
3566  scratch2 = gen_reg_rtx (mode0);
3567
3568  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3569  if (! pat)
3570    return 0;
3571  emit_insn (pat);
3572
3573  return target;
3574}
3575
3576static rtx
3577altivec_expand_binop_builtin (icode, arglist, target)
3578     enum insn_code icode;
3579     tree arglist;
3580     rtx target;
3581{
3582  rtx pat;
3583  tree arg0 = TREE_VALUE (arglist);
3584  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3585  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3586  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3587  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3588  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3589  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3590
3591  /* If we got invalid arguments bail out before generating bad rtl.  */
3592  if (arg0 == error_mark_node || arg1 == error_mark_node)
3593    return NULL_RTX;
3594
3595  if (target == 0
3596      || GET_MODE (target) != tmode
3597      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3598    target = gen_reg_rtx (tmode);
3599
3600  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3601    op0 = copy_to_mode_reg (mode0, op0);
3602  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3603    op1 = copy_to_mode_reg (mode1, op1);
3604
3605  pat = GEN_FCN (icode) (target, op0, op1);
3606  if (! pat)
3607    return 0;
3608  emit_insn (pat);
3609
3610  return target;
3611}
3612
3613static rtx
3614altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3615     enum insn_code icode;
3616     const char *opcode;
3617     tree arglist;
3618     rtx target;
3619{
3620  rtx pat, scratch;
3621  tree cr6_form = TREE_VALUE (arglist);
3622  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3623  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3624  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3625  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3626  enum machine_mode tmode = SImode;
3627  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3628  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3629  int cr6_form_int;
3630
3631  if (TREE_CODE (cr6_form) != INTEGER_CST)
3632    {
3633      error ("argument 1 of __builtin_altivec_predicate must be a constant");
3634      return NULL_RTX;
3635    }
3636  else
3637    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3638
3639  if (mode0 != mode1)
3640    abort ();
3641
3642  /* If we have invalid arguments, bail out before generating bad rtl.  */
3643  if (arg0 == error_mark_node || arg1 == error_mark_node)
3644    return NULL_RTX;
3645
3646  if (target == 0
3647      || GET_MODE (target) != tmode
3648      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3649    target = gen_reg_rtx (tmode);
3650
3651  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3652    op0 = copy_to_mode_reg (mode0, op0);
3653  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3654    op1 = copy_to_mode_reg (mode1, op1);
3655
3656  scratch = gen_reg_rtx (mode0);
3657
3658  pat = GEN_FCN (icode) (scratch, op0, op1,
3659			 gen_rtx (SYMBOL_REF, Pmode, opcode));
3660  if (! pat)
3661    return 0;
3662  emit_insn (pat);
3663
3664  /* The vec_any* and vec_all* predicates use the same opcodes for two
3665     different operations, but the bits in CR6 will be different
3666     depending on what information we want.  So we have to play tricks
3667     with CR6 to get the right bits out.
3668
3669     If you think this is disgusting, look at the specs for the
3670     AltiVec predicates.  */
3671
3672     switch (cr6_form_int)
3673       {
3674       case 0:
3675	 emit_insn (gen_cr6_test_for_zero (target));
3676	 break;
3677       case 1:
3678	 emit_insn (gen_cr6_test_for_zero_reverse (target));
3679	 break;
3680       case 2:
3681	 emit_insn (gen_cr6_test_for_lt (target));
3682	 break;
3683       case 3:
3684	 emit_insn (gen_cr6_test_for_lt_reverse (target));
3685	 break;
3686       default:
3687	 error ("argument 1 of __builtin_altivec_predicate is out of range");
3688	 break;
3689       }
3690
3691  return target;
3692}
3693
3694static rtx
3695altivec_expand_stv_builtin (icode, arglist)
3696     enum insn_code icode;
3697     tree arglist;
3698{
3699  tree arg0 = TREE_VALUE (arglist);
3700  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3701  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3702  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3703  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3704  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3705  rtx pat;
3706  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3707  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3708  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3709
3710  /* Invalid arguments.  Bail before doing anything stoopid!  */
3711  if (arg0 == error_mark_node
3712      || arg1 == error_mark_node
3713      || arg2 == error_mark_node)
3714    return NULL_RTX;
3715
3716  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3717    op0 = copy_to_mode_reg (mode2, op0);
3718  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3719    op1 = copy_to_mode_reg (mode0, op1);
3720  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3721    op2 = copy_to_mode_reg (mode1, op2);
3722
3723  pat = GEN_FCN (icode) (op1, op2, op0);
3724  if (pat)
3725    emit_insn (pat);
3726  return NULL_RTX;
3727}
3728
3729static rtx
3730altivec_expand_ternop_builtin (icode, arglist, target)
3731     enum insn_code icode;
3732     tree arglist;
3733     rtx target;
3734{
3735  rtx pat;
3736  tree arg0 = TREE_VALUE (arglist);
3737  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3738  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3739  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3740  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3741  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3742  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3743  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3744  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3745  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3746
3747  /* If we got invalid arguments bail out before generating bad rtl.  */
3748  if (arg0 == error_mark_node
3749      || arg1 == error_mark_node
3750      || arg2 == error_mark_node)
3751    return NULL_RTX;
3752
3753  if (target == 0
3754      || GET_MODE (target) != tmode
3755      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3756    target = gen_reg_rtx (tmode);
3757
3758  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3759    op0 = copy_to_mode_reg (mode0, op0);
3760  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3761    op1 = copy_to_mode_reg (mode1, op1);
3762  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3763    op2 = copy_to_mode_reg (mode2, op2);
3764
3765  pat = GEN_FCN (icode) (target, op0, op1, op2);
3766  if (! pat)
3767    return 0;
3768  emit_insn (pat);
3769
3770  return target;
3771}
3772static rtx
3773altivec_expand_builtin (exp, target)
3774     tree exp;
3775     rtx target;
3776{
3777  struct builtin_description *d;
3778  struct builtin_description_predicates *dp;
3779  size_t i;
3780  enum insn_code icode;
3781  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3782  tree arglist = TREE_OPERAND (exp, 1);
3783  tree arg0, arg1, arg2;
3784  rtx op0, op1, op2, pat;
3785  enum machine_mode tmode, mode0, mode1, mode2;
3786  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3787
3788  switch (fcode)
3789    {
3790    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3791      icode = CODE_FOR_altivec_lvx_16qi;
3792      arg0 = TREE_VALUE (arglist);
3793      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3794      tmode = insn_data[icode].operand[0].mode;
3795      mode0 = insn_data[icode].operand[1].mode;
3796
3797      if (target == 0
3798	  || GET_MODE (target) != tmode
3799	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3800	target = gen_reg_rtx (tmode);
3801
3802      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3803	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3804
3805      pat = GEN_FCN (icode) (target, op0);
3806      if (! pat)
3807	return 0;
3808      emit_insn (pat);
3809      return target;
3810
3811    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3812      icode = CODE_FOR_altivec_lvx_8hi;
3813      arg0 = TREE_VALUE (arglist);
3814      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3815      tmode = insn_data[icode].operand[0].mode;
3816      mode0 = insn_data[icode].operand[1].mode;
3817
3818      if (target == 0
3819	  || GET_MODE (target) != tmode
3820	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3821	target = gen_reg_rtx (tmode);
3822
3823      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3824	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3825
3826      pat = GEN_FCN (icode) (target, op0);
3827      if (! pat)
3828	return 0;
3829      emit_insn (pat);
3830      return target;
3831
3832    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3833      icode = CODE_FOR_altivec_lvx_4si;
3834      arg0 = TREE_VALUE (arglist);
3835      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3836      tmode = insn_data[icode].operand[0].mode;
3837      mode0 = insn_data[icode].operand[1].mode;
3838
3839      if (target == 0
3840	  || GET_MODE (target) != tmode
3841	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3842	target = gen_reg_rtx (tmode);
3843
3844      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3845	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3846
3847      pat = GEN_FCN (icode) (target, op0);
3848      if (! pat)
3849	return 0;
3850      emit_insn (pat);
3851      return target;
3852
3853    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3854      icode = CODE_FOR_altivec_lvx_4sf;
3855      arg0 = TREE_VALUE (arglist);
3856      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3857      tmode = insn_data[icode].operand[0].mode;
3858      mode0 = insn_data[icode].operand[1].mode;
3859
3860      if (target == 0
3861	  || GET_MODE (target) != tmode
3862	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3863	target = gen_reg_rtx (tmode);
3864
3865      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3866	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3867
3868      pat = GEN_FCN (icode) (target, op0);
3869      if (! pat)
3870	return 0;
3871      emit_insn (pat);
3872      return target;
3873
3874    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3875      icode = CODE_FOR_altivec_stvx_16qi;
3876      arg0 = TREE_VALUE (arglist);
3877      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3878      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3879      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3880      mode0 = insn_data[icode].operand[0].mode;
3881      mode1 = insn_data[icode].operand[1].mode;
3882
3883      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3884	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3885      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3886	op1 = copy_to_mode_reg (mode1, op1);
3887
3888      pat = GEN_FCN (icode) (op0, op1);
3889      if (pat)
3890	emit_insn (pat);
3891      return NULL_RTX;
3892
3893    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3894      icode = CODE_FOR_altivec_stvx_8hi;
3895      arg0 = TREE_VALUE (arglist);
3896      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3897      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3898      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3899      mode0 = insn_data[icode].operand[0].mode;
3900      mode1 = insn_data[icode].operand[1].mode;
3901
3902      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3903	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3904      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3905	op1 = copy_to_mode_reg (mode1, op1);
3906
3907      pat = GEN_FCN (icode) (op0, op1);
3908      if (pat)
3909	emit_insn (pat);
3910      return NULL_RTX;
3911
3912    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3913      icode = CODE_FOR_altivec_stvx_4si;
3914      arg0 = TREE_VALUE (arglist);
3915      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3916      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3917      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3918      mode0 = insn_data[icode].operand[0].mode;
3919      mode1 = insn_data[icode].operand[1].mode;
3920
3921      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3922	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3923      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3924	op1 = copy_to_mode_reg (mode1, op1);
3925
3926      pat = GEN_FCN (icode) (op0, op1);
3927      if (pat)
3928	emit_insn (pat);
3929      return NULL_RTX;
3930
3931    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3932      icode = CODE_FOR_altivec_stvx_4sf;
3933      arg0 = TREE_VALUE (arglist);
3934      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3935      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3936      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3937      mode0 = insn_data[icode].operand[0].mode;
3938      mode1 = insn_data[icode].operand[1].mode;
3939
3940      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3941	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3942      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3943	op1 = copy_to_mode_reg (mode1, op1);
3944
3945      pat = GEN_FCN (icode) (op0, op1);
3946      if (pat)
3947	emit_insn (pat);
3948      return NULL_RTX;
3949
3950    case ALTIVEC_BUILTIN_STVX:
3951      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3952    case ALTIVEC_BUILTIN_STVEBX:
3953      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3954    case ALTIVEC_BUILTIN_STVEHX:
3955      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3956    case ALTIVEC_BUILTIN_STVEWX:
3957      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3958    case ALTIVEC_BUILTIN_STVXL:
3959      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3960
3961    case ALTIVEC_BUILTIN_MFVSCR:
3962      icode = CODE_FOR_altivec_mfvscr;
3963      tmode = insn_data[icode].operand[0].mode;
3964
3965      if (target == 0
3966	  || GET_MODE (target) != tmode
3967	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3968	target = gen_reg_rtx (tmode);
3969
3970      pat = GEN_FCN (icode) (target);
3971      if (! pat)
3972	return 0;
3973      emit_insn (pat);
3974      return target;
3975
3976    case ALTIVEC_BUILTIN_MTVSCR:
3977      icode = CODE_FOR_altivec_mtvscr;
3978      arg0 = TREE_VALUE (arglist);
3979      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3980      mode0 = insn_data[icode].operand[0].mode;
3981
3982      /* If we got invalid arguments bail out before generating bad rtl.  */
3983      if (arg0 == error_mark_node)
3984	return NULL_RTX;
3985
3986      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3987	op0 = copy_to_mode_reg (mode0, op0);
3988
3989      pat = GEN_FCN (icode) (op0);
3990      if (pat)
3991	emit_insn (pat);
3992      return NULL_RTX;
3993
3994    case ALTIVEC_BUILTIN_DSSALL:
3995      emit_insn (gen_altivec_dssall ());
3996      return NULL_RTX;
3997
3998    case ALTIVEC_BUILTIN_DSS:
3999      icode = CODE_FOR_altivec_dss;
4000      arg0 = TREE_VALUE (arglist);
4001      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4002      mode0 = insn_data[icode].operand[0].mode;
4003
4004      /* If we got invalid arguments bail out before generating bad rtl.  */
4005      if (arg0 == error_mark_node)
4006	return NULL_RTX;
4007
4008      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4009	op0 = copy_to_mode_reg (mode0, op0);
4010
4011      emit_insn (gen_altivec_dss (op0));
4012      return NULL_RTX;
4013    }
4014
4015  /* Handle DST variants.  */
4016  d = (struct builtin_description *) bdesc_dst;
4017  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4018    if (d->code == fcode)
4019      {
4020	arg0 = TREE_VALUE (arglist);
4021	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4022	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4023	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4024	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4025	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4026	mode0 = insn_data[d->icode].operand[0].mode;
4027	mode1 = insn_data[d->icode].operand[1].mode;
4028	mode2 = insn_data[d->icode].operand[2].mode;
4029
4030	/* Invalid arguments, bail out before generating bad rtl.  */
4031	if (arg0 == error_mark_node
4032	    || arg1 == error_mark_node
4033	    || arg2 == error_mark_node)
4034	  return NULL_RTX;
4035
4036	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4037	  op0 = copy_to_mode_reg (mode0, op0);
4038	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4039	  op1 = copy_to_mode_reg (mode1, op1);
4040
4041	if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4042	  {
4043	    error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4044	    return NULL_RTX;
4045	  }
4046
4047	pat = GEN_FCN (d->icode) (op0, op1, op2);
4048	if (pat != 0)
4049	  emit_insn (pat);
4050
4051	return NULL_RTX;
4052      }
4053
4054  /* Expand abs* operations.  */
4055  d = (struct builtin_description *) bdesc_abs;
4056  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4057    if (d->code == fcode)
4058      return altivec_expand_abs_builtin (d->icode, arglist, target);
4059
4060  /* Handle simple unary operations.  */
4061  d = (struct builtin_description *) bdesc_1arg;
4062  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4063    if (d->code == fcode)
4064      return altivec_expand_unop_builtin (d->icode, arglist, target);
4065
4066  /* Handle simple binary operations.  */
4067  d = (struct builtin_description *) bdesc_2arg;
4068  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4069    if (d->code == fcode)
4070      return altivec_expand_binop_builtin (d->icode, arglist, target);
4071
4072  /* Expand the AltiVec predicates.  */
4073  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4074  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4075    if (dp->code == fcode)
4076      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4077
4078  /* LV* are funky.  We initialized them differently.  */
4079  switch (fcode)
4080    {
4081    case ALTIVEC_BUILTIN_LVSL:
4082      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4083					   arglist, target);
4084    case ALTIVEC_BUILTIN_LVSR:
4085      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4086					   arglist, target);
4087    case ALTIVEC_BUILTIN_LVEBX:
4088      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4089					   arglist, target);
4090    case ALTIVEC_BUILTIN_LVEHX:
4091      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4092					   arglist, target);
4093    case ALTIVEC_BUILTIN_LVEWX:
4094      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4095					   arglist, target);
4096    case ALTIVEC_BUILTIN_LVXL:
4097      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4098					   arglist, target);
4099    case ALTIVEC_BUILTIN_LVX:
4100      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4101					   arglist, target);
4102    default:
4103      break;
4104      /* Fall through.  */
4105    }
4106
4107  /* Handle simple ternary operations.  */
4108  d = (struct builtin_description *) bdesc_3arg;
4109  for (i = 0; i < sizeof  (bdesc_3arg) / sizeof *d; i++, d++)
4110    if (d->code == fcode)
4111      return altivec_expand_ternop_builtin (d->icode, arglist, target);
4112
4113  abort ();
4114  return NULL_RTX;
4115}
4116
4117/* Expand an expression EXP that calls a built-in function,
4118   with result going to TARGET if that's convenient
4119   (and in mode MODE if that's convenient).
4120   SUBTARGET may be used as the target for computing one of EXP's operands.
4121   IGNORE is nonzero if the value is to be ignored.  */
4122
4123static rtx
4124rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4125     tree exp;
4126     rtx target;
4127     rtx subtarget ATTRIBUTE_UNUSED;
4128     enum machine_mode mode ATTRIBUTE_UNUSED;
4129     int ignore ATTRIBUTE_UNUSED;
4130{
4131  if (TARGET_ALTIVEC)
4132    return altivec_expand_builtin (exp, target);
4133
4134  abort ();
4135}
4136
4137static void
4138rs6000_init_builtins ()
4139{
4140  if (TARGET_ALTIVEC)
4141    altivec_init_builtins ();
4142}
4143
4144static void
4145altivec_init_builtins (void)
4146{
4147  struct builtin_description *d;
4148  struct builtin_description_predicates *dp;
4149  size_t i;
4150
4151  tree endlink = void_list_node;
4152
4153  tree pint_type_node = build_pointer_type (integer_type_node);
4154  tree pvoid_type_node = build_pointer_type (void_type_node);
4155  tree pshort_type_node = build_pointer_type (short_integer_type_node);
4156  tree pchar_type_node = build_pointer_type (char_type_node);
4157  tree pfloat_type_node = build_pointer_type (float_type_node);
4158
4159  tree v4sf_ftype_v4sf_v4sf_v16qi
4160    = build_function_type (V4SF_type_node,
4161			   tree_cons (NULL_TREE, V4SF_type_node,
4162				      tree_cons (NULL_TREE, V4SF_type_node,
4163						 tree_cons (NULL_TREE,
4164							    V16QI_type_node,
4165							    endlink))));
4166  tree v4si_ftype_v4si_v4si_v16qi
4167    = build_function_type (V4SI_type_node,
4168			   tree_cons (NULL_TREE, V4SI_type_node,
4169				      tree_cons (NULL_TREE, V4SI_type_node,
4170						 tree_cons (NULL_TREE,
4171							    V16QI_type_node,
4172							    endlink))));
4173  tree v8hi_ftype_v8hi_v8hi_v16qi
4174    = build_function_type (V8HI_type_node,
4175			   tree_cons (NULL_TREE, V8HI_type_node,
4176				      tree_cons (NULL_TREE, V8HI_type_node,
4177						 tree_cons (NULL_TREE,
4178							    V16QI_type_node,
4179							    endlink))));
4180  tree v16qi_ftype_v16qi_v16qi_v16qi
4181    = build_function_type (V16QI_type_node,
4182			   tree_cons (NULL_TREE, V16QI_type_node,
4183				      tree_cons (NULL_TREE, V16QI_type_node,
4184						 tree_cons (NULL_TREE,
4185							    V16QI_type_node,
4186							    endlink))));
4187
4188  /* V4SI foo (char).  */
4189  tree v4si_ftype_char
4190    = build_function_type (V4SI_type_node,
4191		           tree_cons (NULL_TREE, char_type_node, endlink));
4192
4193  /* V8HI foo (char).  */
4194  tree v8hi_ftype_char
4195    = build_function_type (V8HI_type_node,
4196		           tree_cons (NULL_TREE, char_type_node, endlink));
4197
4198  /* V16QI foo (char).  */
4199  tree v16qi_ftype_char
4200    = build_function_type (V16QI_type_node,
4201		           tree_cons (NULL_TREE, char_type_node, endlink));
4202  /* V4SF foo (V4SF).  */
4203  tree v4sf_ftype_v4sf
4204    = build_function_type (V4SF_type_node,
4205			   tree_cons (NULL_TREE, V4SF_type_node, endlink));
4206
4207  /* V4SI foo (int *).  */
4208  tree v4si_ftype_pint
4209    = build_function_type (V4SI_type_node,
4210			   tree_cons (NULL_TREE, pint_type_node, endlink));
4211  /* V8HI foo (short *).  */
4212  tree v8hi_ftype_pshort
4213    = build_function_type (V8HI_type_node,
4214			   tree_cons (NULL_TREE, pshort_type_node, endlink));
4215  /* V16QI foo (char *).  */
4216  tree v16qi_ftype_pchar
4217    = build_function_type (V16QI_type_node,
4218			   tree_cons (NULL_TREE, pchar_type_node, endlink));
4219  /* V4SF foo (float *).  */
4220  tree v4sf_ftype_pfloat
4221    = build_function_type (V4SF_type_node,
4222			   tree_cons (NULL_TREE, pfloat_type_node, endlink));
4223
4224  /* V8HI foo (V16QI).  */
4225  tree v8hi_ftype_v16qi
4226    = build_function_type (V8HI_type_node,
4227			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4228
4229  /* void foo (void *, int, char/literal).  */
4230  tree void_ftype_pvoid_int_char
4231    = build_function_type (void_type_node,
4232			   tree_cons (NULL_TREE, pvoid_type_node,
4233				      tree_cons (NULL_TREE, integer_type_node,
4234						 tree_cons (NULL_TREE,
4235							    char_type_node,
4236							    endlink))));
4237
4238  /* void foo (int *, V4SI).  */
4239  tree void_ftype_pint_v4si
4240    = build_function_type (void_type_node,
4241			   tree_cons (NULL_TREE, pint_type_node,
4242				      tree_cons (NULL_TREE, V4SI_type_node,
4243						 endlink)));
4244  /* void foo (short *, V8HI).  */
4245  tree void_ftype_pshort_v8hi
4246    = build_function_type (void_type_node,
4247			   tree_cons (NULL_TREE, pshort_type_node,
4248				      tree_cons (NULL_TREE, V8HI_type_node,
4249						 endlink)));
4250  /* void foo (char *, V16QI).  */
4251  tree void_ftype_pchar_v16qi
4252    = build_function_type (void_type_node,
4253			   tree_cons (NULL_TREE, pchar_type_node,
4254				      tree_cons (NULL_TREE, V16QI_type_node,
4255						 endlink)));
4256  /* void foo (float *, V4SF).  */
4257  tree void_ftype_pfloat_v4sf
4258    = build_function_type (void_type_node,
4259			   tree_cons (NULL_TREE, pfloat_type_node,
4260				      tree_cons (NULL_TREE, V4SF_type_node,
4261						 endlink)));
4262
4263  /* void foo (V4SI).  */
4264  tree void_ftype_v4si
4265    = build_function_type (void_type_node,
4266			   tree_cons (NULL_TREE, V4SI_type_node,
4267				      endlink));
4268
4269  /* void foo (vint, int, void *).  */
4270  tree void_ftype_v4si_int_pvoid
4271    = build_function_type (void_type_node,
4272			   tree_cons (NULL_TREE, V4SI_type_node,
4273				      tree_cons (NULL_TREE, integer_type_node,
4274						 tree_cons (NULL_TREE,
4275							    pvoid_type_node,
4276							    endlink))));
4277
4278  /* void foo (vchar, int, void *).  */
4279  tree void_ftype_v16qi_int_pvoid
4280    = build_function_type (void_type_node,
4281			   tree_cons (NULL_TREE, V16QI_type_node,
4282				      tree_cons (NULL_TREE, integer_type_node,
4283						 tree_cons (NULL_TREE,
4284							    pvoid_type_node,
4285							    endlink))));
4286
4287  /* void foo (vshort, int, void *).  */
4288  tree void_ftype_v8hi_int_pvoid
4289    = build_function_type (void_type_node,
4290			   tree_cons (NULL_TREE, V8HI_type_node,
4291				      tree_cons (NULL_TREE, integer_type_node,
4292						 tree_cons (NULL_TREE,
4293							    pvoid_type_node,
4294							    endlink))));
4295
4296  /* void foo (char).  */
4297  tree void_ftype_qi
4298    = build_function_type (void_type_node,
4299			   tree_cons (NULL_TREE, char_type_node,
4300				      endlink));
4301
4302  /* void foo (void).  */
4303  tree void_ftype_void
4304    = build_function_type (void_type_node, void_list_node);
4305
4306  /* vshort foo (void).  */
4307  tree v8hi_ftype_void
4308    = build_function_type (V8HI_type_node, void_list_node);
4309
4310  tree v4si_ftype_v4si_v4si
4311    = build_function_type (V4SI_type_node,
4312			   tree_cons (NULL_TREE, V4SI_type_node,
4313				      tree_cons (NULL_TREE, V4SI_type_node,
4314						 endlink)));
4315
4316  /* These are for the unsigned 5 bit literals.  */
4317
4318  tree v4sf_ftype_v4si_char
4319    = build_function_type (V4SF_type_node,
4320			   tree_cons (NULL_TREE, V4SI_type_node,
4321				      tree_cons (NULL_TREE, char_type_node,
4322						 endlink)));
4323  tree v4si_ftype_v4sf_char
4324    = build_function_type (V4SI_type_node,
4325			   tree_cons (NULL_TREE, V4SF_type_node,
4326				      tree_cons (NULL_TREE, char_type_node,
4327						 endlink)));
4328  tree v4si_ftype_v4si_char
4329    = build_function_type (V4SI_type_node,
4330			   tree_cons (NULL_TREE, V4SI_type_node,
4331				      tree_cons (NULL_TREE, char_type_node,
4332						 endlink)));
4333  tree v8hi_ftype_v8hi_char
4334    = build_function_type (V8HI_type_node,
4335			   tree_cons (NULL_TREE, V8HI_type_node,
4336				      tree_cons (NULL_TREE, char_type_node,
4337						 endlink)));
4338  tree v16qi_ftype_v16qi_char
4339    = build_function_type (V16QI_type_node,
4340			   tree_cons (NULL_TREE, V16QI_type_node,
4341				      tree_cons (NULL_TREE, char_type_node,
4342						 endlink)));
4343
4344  /* These are for the unsigned 4 bit literals.  */
4345
4346  tree v16qi_ftype_v16qi_v16qi_char
4347    = build_function_type (V16QI_type_node,
4348			   tree_cons (NULL_TREE, V16QI_type_node,
4349				      tree_cons (NULL_TREE, V16QI_type_node,
4350						 tree_cons (NULL_TREE,
4351							    char_type_node,
4352							    endlink))));
4353
4354  tree v8hi_ftype_v8hi_v8hi_char
4355    = build_function_type (V8HI_type_node,
4356			   tree_cons (NULL_TREE, V8HI_type_node,
4357				      tree_cons (NULL_TREE, V8HI_type_node,
4358						 tree_cons (NULL_TREE,
4359							    char_type_node,
4360							    endlink))));
4361
4362  tree v4si_ftype_v4si_v4si_char
4363    = build_function_type (V4SI_type_node,
4364			   tree_cons (NULL_TREE, V4SI_type_node,
4365				      tree_cons (NULL_TREE, V4SI_type_node,
4366						 tree_cons (NULL_TREE,
4367							    char_type_node,
4368							    endlink))));
4369
4370  tree v4sf_ftype_v4sf_v4sf_char
4371    = build_function_type (V4SF_type_node,
4372			   tree_cons (NULL_TREE, V4SF_type_node,
4373				      tree_cons (NULL_TREE, V4SF_type_node,
4374						 tree_cons (NULL_TREE,
4375							    char_type_node,
4376							    endlink))));
4377
4378  /* End of 4 bit literals.  */
4379
4380  tree v4sf_ftype_v4sf_v4sf
4381    = build_function_type (V4SF_type_node,
4382			   tree_cons (NULL_TREE, V4SF_type_node,
4383				      tree_cons (NULL_TREE, V4SF_type_node,
4384						 endlink)));
4385  tree v4sf_ftype_v4sf_v4sf_v4si
4386    = build_function_type (V4SF_type_node,
4387			   tree_cons (NULL_TREE, V4SF_type_node,
4388				      tree_cons (NULL_TREE, V4SF_type_node,
4389						 tree_cons (NULL_TREE,
4390							    V4SI_type_node,
4391							    endlink))));
4392  tree v4sf_ftype_v4sf_v4sf_v4sf
4393    = build_function_type (V4SF_type_node,
4394			   tree_cons (NULL_TREE, V4SF_type_node,
4395				      tree_cons (NULL_TREE, V4SF_type_node,
4396						 tree_cons (NULL_TREE,
4397							    V4SF_type_node,
4398							    endlink))));
4399  tree v4si_ftype_v4si_v4si_v4si
4400    = build_function_type (V4SI_type_node,
4401			   tree_cons (NULL_TREE, V4SI_type_node,
4402				      tree_cons (NULL_TREE, V4SI_type_node,
4403						 tree_cons (NULL_TREE,
4404							    V4SI_type_node,
4405							    endlink))));
4406
4407  tree v8hi_ftype_v8hi_v8hi
4408    = build_function_type (V8HI_type_node,
4409			   tree_cons (NULL_TREE, V8HI_type_node,
4410				      tree_cons (NULL_TREE, V8HI_type_node,
4411						 endlink)));
4412  tree v8hi_ftype_v8hi_v8hi_v8hi
4413    = build_function_type (V8HI_type_node,
4414			   tree_cons (NULL_TREE, V8HI_type_node,
4415				      tree_cons (NULL_TREE, V8HI_type_node,
4416						 tree_cons (NULL_TREE,
4417							    V8HI_type_node,
4418							    endlink))));
4419 tree v4si_ftype_v8hi_v8hi_v4si
4420    = build_function_type (V4SI_type_node,
4421			   tree_cons (NULL_TREE, V8HI_type_node,
4422				      tree_cons (NULL_TREE, V8HI_type_node,
4423						 tree_cons (NULL_TREE,
4424							    V4SI_type_node,
4425							    endlink))));
4426 tree v4si_ftype_v16qi_v16qi_v4si
4427    = build_function_type (V4SI_type_node,
4428			   tree_cons (NULL_TREE, V16QI_type_node,
4429				      tree_cons (NULL_TREE, V16QI_type_node,
4430						 tree_cons (NULL_TREE,
4431							    V4SI_type_node,
4432							    endlink))));
4433
4434  tree v16qi_ftype_v16qi_v16qi
4435    = build_function_type (V16QI_type_node,
4436			   tree_cons (NULL_TREE, V16QI_type_node,
4437				      tree_cons (NULL_TREE, V16QI_type_node,
4438						 endlink)));
4439
4440  tree v4si_ftype_v4sf_v4sf
4441    = build_function_type (V4SI_type_node,
4442			   tree_cons (NULL_TREE, V4SF_type_node,
4443				      tree_cons (NULL_TREE, V4SF_type_node,
4444						 endlink)));
4445
4446  tree v4si_ftype_v4si
4447    = build_function_type (V4SI_type_node,
4448			   tree_cons (NULL_TREE, V4SI_type_node, endlink));
4449
4450  tree v8hi_ftype_v8hi
4451    = build_function_type (V8HI_type_node,
4452			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4453
4454  tree v16qi_ftype_v16qi
4455    = build_function_type (V16QI_type_node,
4456			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4457
4458  tree v8hi_ftype_v16qi_v16qi
4459    = build_function_type (V8HI_type_node,
4460			   tree_cons (NULL_TREE, V16QI_type_node,
4461				      tree_cons (NULL_TREE, V16QI_type_node,
4462						 endlink)));
4463
4464  tree v4si_ftype_v8hi_v8hi
4465    = build_function_type (V4SI_type_node,
4466			   tree_cons (NULL_TREE, V8HI_type_node,
4467				      tree_cons (NULL_TREE, V8HI_type_node,
4468						 endlink)));
4469
4470  tree v8hi_ftype_v4si_v4si
4471    = build_function_type (V8HI_type_node,
4472			   tree_cons (NULL_TREE, V4SI_type_node,
4473				      tree_cons (NULL_TREE, V4SI_type_node,
4474						 endlink)));
4475
4476  tree v16qi_ftype_v8hi_v8hi
4477    = build_function_type (V16QI_type_node,
4478			   tree_cons (NULL_TREE, V8HI_type_node,
4479				      tree_cons (NULL_TREE, V8HI_type_node,
4480						 endlink)));
4481
4482  tree v4si_ftype_v16qi_v4si
4483    = build_function_type (V4SI_type_node,
4484			   tree_cons (NULL_TREE, V16QI_type_node,
4485				      tree_cons (NULL_TREE, V4SI_type_node,
4486						 endlink)));
4487
4488  tree v4si_ftype_v16qi_v16qi
4489    = build_function_type (V4SI_type_node,
4490			   tree_cons (NULL_TREE, V16QI_type_node,
4491				      tree_cons (NULL_TREE, V16QI_type_node,
4492						 endlink)));
4493
4494  tree v4si_ftype_v8hi_v4si
4495    = build_function_type (V4SI_type_node,
4496			   tree_cons (NULL_TREE, V8HI_type_node,
4497				      tree_cons (NULL_TREE, V4SI_type_node,
4498						 endlink)));
4499
4500  tree v4si_ftype_v8hi
4501    = build_function_type (V4SI_type_node,
4502			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4503
4504  tree int_ftype_v4si_v4si
4505    = build_function_type (integer_type_node,
4506			   tree_cons (NULL_TREE, V4SI_type_node,
4507				      tree_cons (NULL_TREE, V4SI_type_node,
4508						 endlink)));
4509
4510  tree int_ftype_v4sf_v4sf
4511    = build_function_type (integer_type_node,
4512			   tree_cons (NULL_TREE, V4SF_type_node,
4513				      tree_cons (NULL_TREE, V4SF_type_node,
4514						 endlink)));
4515
4516  tree int_ftype_v16qi_v16qi
4517    = build_function_type (integer_type_node,
4518			   tree_cons (NULL_TREE, V16QI_type_node,
4519				      tree_cons (NULL_TREE, V16QI_type_node,
4520						 endlink)));
4521
4522  tree int_ftype_int_v4si_v4si
4523    = build_function_type
4524    (integer_type_node,
4525     tree_cons (NULL_TREE, integer_type_node,
4526		tree_cons (NULL_TREE, V4SI_type_node,
4527			   tree_cons (NULL_TREE, V4SI_type_node,
4528				      endlink))));
4529
4530  tree int_ftype_int_v4sf_v4sf
4531    = build_function_type
4532    (integer_type_node,
4533     tree_cons (NULL_TREE, integer_type_node,
4534		tree_cons (NULL_TREE, V4SF_type_node,
4535			   tree_cons (NULL_TREE, V4SF_type_node,
4536				      endlink))));
4537
4538  tree int_ftype_int_v8hi_v8hi
4539    = build_function_type
4540    (integer_type_node,
4541     tree_cons (NULL_TREE, integer_type_node,
4542		 tree_cons (NULL_TREE, V8HI_type_node,
4543			    tree_cons (NULL_TREE, V8HI_type_node,
4544				       endlink))));
4545
4546  tree int_ftype_int_v16qi_v16qi
4547    = build_function_type
4548    (integer_type_node,
4549     tree_cons (NULL_TREE, integer_type_node,
4550		tree_cons (NULL_TREE, V16QI_type_node,
4551			   tree_cons (NULL_TREE, V16QI_type_node,
4552				      endlink))));
4553
4554  tree v16qi_ftype_int_pvoid
4555    = build_function_type (V16QI_type_node,
4556			   tree_cons (NULL_TREE, integer_type_node,
4557				      tree_cons (NULL_TREE, pvoid_type_node,
4558						 endlink)));
4559
4560  tree v4si_ftype_int_pvoid
4561    = build_function_type (V4SI_type_node,
4562			   tree_cons (NULL_TREE, integer_type_node,
4563				      tree_cons (NULL_TREE, pvoid_type_node,
4564						 endlink)));
4565
4566  tree v8hi_ftype_int_pvoid
4567    = build_function_type (V8HI_type_node,
4568			   tree_cons (NULL_TREE, integer_type_node,
4569				      tree_cons (NULL_TREE, pvoid_type_node,
4570						 endlink)));
4571
4572  tree int_ftype_v8hi_v8hi
4573    = build_function_type (integer_type_node,
4574			   tree_cons (NULL_TREE, V8HI_type_node,
4575				      tree_cons (NULL_TREE, V8HI_type_node,
4576						 endlink)));
4577
4578  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4579  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4580  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4581  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4582  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4583  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4584  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4585  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4586  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4587  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4588  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4589  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4590  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4591  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4592  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4593  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4594  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4595  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4596  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4597  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4598  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4599  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4600  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4601  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4602
4603  /* Add the simple ternary operators.  */
4604  d = (struct builtin_description *) bdesc_3arg;
4605  for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4606    {
4607
4608      enum machine_mode mode0, mode1, mode2, mode3;
4609      tree type;
4610
4611      if (d->name == 0)
4612	continue;
4613
4614      mode0 = insn_data[d->icode].operand[0].mode;
4615      mode1 = insn_data[d->icode].operand[1].mode;
4616      mode2 = insn_data[d->icode].operand[2].mode;
4617      mode3 = insn_data[d->icode].operand[3].mode;
4618
4619      /* When all four are of the same mode.  */
4620      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4621	{
4622	  switch (mode0)
4623	    {
4624	    case V4SImode:
4625	      type = v4si_ftype_v4si_v4si_v4si;
4626	      break;
4627	    case V4SFmode:
4628	      type = v4sf_ftype_v4sf_v4sf_v4sf;
4629	      break;
4630	    case V8HImode:
4631	      type = v8hi_ftype_v8hi_v8hi_v8hi;
4632	      break;
4633	    case V16QImode:
4634	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4635	      break;
4636	    default:
4637	      abort();
4638	    }
4639	}
4640      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4641        {
4642	  switch (mode0)
4643	    {
4644	    case V4SImode:
4645	      type = v4si_ftype_v4si_v4si_v16qi;
4646	      break;
4647	    case V4SFmode:
4648	      type = v4sf_ftype_v4sf_v4sf_v16qi;
4649	      break;
4650	    case V8HImode:
4651	      type = v8hi_ftype_v8hi_v8hi_v16qi;
4652	      break;
4653	    case V16QImode:
4654	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4655	      break;
4656	    default:
4657	      abort();
4658	    }
4659	}
4660      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4661	       && mode3 == V4SImode)
4662	type = v4si_ftype_v16qi_v16qi_v4si;
4663      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4664	       && mode3 == V4SImode)
4665	type = v4si_ftype_v8hi_v8hi_v4si;
4666      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4667	       && mode3 == V4SImode)
4668	type = v4sf_ftype_v4sf_v4sf_v4si;
4669
4670      /* vchar, vchar, vchar, 4 bit literal.  */
4671      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4672	       && mode3 == QImode)
4673	type = v16qi_ftype_v16qi_v16qi_char;
4674
4675      /* vshort, vshort, vshort, 4 bit literal.  */
4676      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4677	       && mode3 == QImode)
4678	type = v8hi_ftype_v8hi_v8hi_char;
4679
4680      /* vint, vint, vint, 4 bit literal.  */
4681      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4682	       && mode3 == QImode)
4683	type = v4si_ftype_v4si_v4si_char;
4684
4685      /* vfloat, vfloat, vfloat, 4 bit literal.  */
4686      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4687	       && mode3 == QImode)
4688	type = v4sf_ftype_v4sf_v4sf_char;
4689
4690      else
4691	abort ();
4692
4693      def_builtin (d->mask, d->name, type, d->code);
4694    }
4695
4696  /* Add the DST variants.  */
4697  d = (struct builtin_description *) bdesc_dst;
4698  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4699    def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4700
4701  /* Initialize the predicates.  */
4702  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4703  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4704    {
4705      enum machine_mode mode1;
4706      tree type;
4707
4708      mode1 = insn_data[dp->icode].operand[1].mode;
4709
4710      switch (mode1)
4711	{
4712	case V4SImode:
4713	  type = int_ftype_int_v4si_v4si;
4714	  break;
4715	case V8HImode:
4716	  type = int_ftype_int_v8hi_v8hi;
4717	  break;
4718	case V16QImode:
4719	  type = int_ftype_int_v16qi_v16qi;
4720	  break;
4721	case V4SFmode:
4722	  type = int_ftype_int_v4sf_v4sf;
4723	  break;
4724	default:
4725	  abort ();
4726	}
4727
4728      def_builtin (dp->mask, dp->name, type, dp->code);
4729    }
4730
4731  /* Add the simple binary operators.  */
4732  d = (struct builtin_description *) bdesc_2arg;
4733  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4734    {
4735      enum machine_mode mode0, mode1, mode2;
4736      tree type;
4737
4738      if (d->name == 0)
4739	continue;
4740
4741      mode0 = insn_data[d->icode].operand[0].mode;
4742      mode1 = insn_data[d->icode].operand[1].mode;
4743      mode2 = insn_data[d->icode].operand[2].mode;
4744
4745      /* When all three operands are of the same mode.  */
4746      if (mode0 == mode1 && mode1 == mode2)
4747	{
4748	  switch (mode0)
4749	    {
4750	    case V4SFmode:
4751	      type = v4sf_ftype_v4sf_v4sf;
4752	      break;
4753	    case V4SImode:
4754	      type = v4si_ftype_v4si_v4si;
4755	      break;
4756	    case V16QImode:
4757	      type = v16qi_ftype_v16qi_v16qi;
4758	      break;
4759	    case V8HImode:
4760	      type = v8hi_ftype_v8hi_v8hi;
4761	      break;
4762	    default:
4763	      abort ();
4764	    }
4765	}
4766
4767      /* A few other combos we really don't want to do manually.  */
4768
4769      /* vint, vfloat, vfloat.  */
4770      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4771	type = v4si_ftype_v4sf_v4sf;
4772
4773      /* vshort, vchar, vchar.  */
4774      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4775	type = v8hi_ftype_v16qi_v16qi;
4776
4777      /* vint, vshort, vshort.  */
4778      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4779	type = v4si_ftype_v8hi_v8hi;
4780
4781      /* vshort, vint, vint.  */
4782      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4783	type = v8hi_ftype_v4si_v4si;
4784
4785      /* vchar, vshort, vshort.  */
4786      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4787	type = v16qi_ftype_v8hi_v8hi;
4788
4789      /* vint, vchar, vint.  */
4790      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4791	type = v4si_ftype_v16qi_v4si;
4792
4793      /* vint, vchar, vchar.  */
4794      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4795	type = v4si_ftype_v16qi_v16qi;
4796
4797      /* vint, vshort, vint.  */
4798      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4799	type = v4si_ftype_v8hi_v4si;
4800
4801      /* vint, vint, 5 bit literal.  */
4802      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4803	type = v4si_ftype_v4si_char;
4804
4805      /* vshort, vshort, 5 bit literal.  */
4806      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4807	type = v8hi_ftype_v8hi_char;
4808
4809      /* vchar, vchar, 5 bit literal.  */
4810      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4811	type = v16qi_ftype_v16qi_char;
4812
4813      /* vfloat, vint, 5 bit literal.  */
4814      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4815	type = v4sf_ftype_v4si_char;
4816
4817      /* vint, vfloat, 5 bit literal.  */
4818      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4819	type = v4si_ftype_v4sf_char;
4820
4821      /* int, x, x.  */
4822      else if (mode0 == SImode)
4823	{
4824	  switch (mode1)
4825	    {
4826	    case V4SImode:
4827	      type = int_ftype_v4si_v4si;
4828	      break;
4829	    case V4SFmode:
4830	      type = int_ftype_v4sf_v4sf;
4831	      break;
4832	    case V16QImode:
4833	      type = int_ftype_v16qi_v16qi;
4834	      break;
4835	    case V8HImode:
4836	      type = int_ftype_v8hi_v8hi;
4837	      break;
4838	    default:
4839	      abort ();
4840	    }
4841	}
4842
4843      else
4844	abort ();
4845
4846      def_builtin (d->mask, d->name, type, d->code);
4847    }
4848
4849  /* Initialize the abs* operators.  */
4850  d = (struct builtin_description *) bdesc_abs;
4851  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4852    {
4853      enum machine_mode mode0;
4854      tree type;
4855
4856      mode0 = insn_data[d->icode].operand[0].mode;
4857
4858      switch (mode0)
4859	{
4860	case V4SImode:
4861	  type = v4si_ftype_v4si;
4862	  break;
4863	case V8HImode:
4864	  type = v8hi_ftype_v8hi;
4865	  break;
4866	case V16QImode:
4867	  type = v16qi_ftype_v16qi;
4868	  break;
4869	case V4SFmode:
4870	  type = v4sf_ftype_v4sf;
4871	  break;
4872	default:
4873	  abort ();
4874	}
4875
4876      def_builtin (d->mask, d->name, type, d->code);
4877    }
4878
4879  /* Add the simple unary operators.  */
4880  d = (struct builtin_description *) bdesc_1arg;
4881  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4882    {
4883      enum machine_mode mode0, mode1;
4884      tree type;
4885
4886      if (d->name == 0)
4887	continue;
4888
4889      mode0 = insn_data[d->icode].operand[0].mode;
4890      mode1 = insn_data[d->icode].operand[1].mode;
4891
4892      if (mode0 == V4SImode && mode1 == QImode)
4893        type = v4si_ftype_char;
4894      else if (mode0 == V8HImode && mode1 == QImode)
4895        type = v8hi_ftype_char;
4896      else if (mode0 == V16QImode && mode1 == QImode)
4897        type = v16qi_ftype_char;
4898      else if (mode0 == V4SFmode && mode1 == V4SFmode)
4899	type = v4sf_ftype_v4sf;
4900      else if (mode0 == V8HImode && mode1 == V16QImode)
4901	type = v8hi_ftype_v16qi;
4902      else if (mode0 == V4SImode && mode1 == V8HImode)
4903	type = v4si_ftype_v8hi;
4904      else
4905	abort ();
4906
4907      def_builtin (d->mask, d->name, type, d->code);
4908    }
4909}
4910
4911
4912/* Expand a block move operation, and return 1 if successful.  Return 0
4913   if we should let the compiler generate normal code.
4914
4915   operands[0] is the destination
4916   operands[1] is the source
4917   operands[2] is the length
4918   operands[3] is the alignment */
4919
4920#define MAX_MOVE_REG 4
4921
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Each iteration emits one string-move insn or one load/store
	 pair, consuming move_bytes bytes; the branch cascade below
	 picks the widest transfer the remaining size, alignment and
	 register availability allow.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* A string move (movstrsi_*reg) takes four operands; a plain
	     register move takes two.  Exactly one member is set per
	     iteration, selected by whether mode stays BLKmode.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register string moves clobber r5..r12, so they
	     may only be used when those registers are not fixed.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Plain load into a fresh pseudo, then store.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Loads are emitted immediately but stores are buffered in
	 stores[] and flushed every MAX_MOVE_REG moves, so up to four
	 loads are issued before their corresponding stores.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores still pending.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
5135
5136
5137/* Return 1 if OP is a load multiple operation.  It is known to be a
5138   PARALLEL and the first section will be tested.  */
5139
5140int
5141load_multiple_operation (op, mode)
5142     rtx op;
5143     enum machine_mode mode ATTRIBUTE_UNUSED;
5144{
5145  int count = XVECLEN (op, 0);
5146  unsigned int dest_regno;
5147  rtx src_addr;
5148  int i;
5149
5150  /* Perform a quick check so we don't blow up below.  */
5151  if (count <= 1
5152      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5153      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5154      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5155    return 0;
5156
5157  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5158  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5159
5160  for (i = 1; i < count; i++)
5161    {
5162      rtx elt = XVECEXP (op, 0, i);
5163
5164      if (GET_CODE (elt) != SET
5165	  || GET_CODE (SET_DEST (elt)) != REG
5166	  || GET_MODE (SET_DEST (elt)) != SImode
5167	  || REGNO (SET_DEST (elt)) != dest_regno + i
5168	  || GET_CODE (SET_SRC (elt)) != MEM
5169	  || GET_MODE (SET_SRC (elt)) != SImode
5170	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5171	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5172	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5173	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5174	return 0;
5175    }
5176
5177  return 1;
5178}
5179
5180/* Similar, but tests for store multiple.  Here, the second vector element
5181   is a CLOBBER.  It will be tested later.  */
5182
5183int
5184store_multiple_operation (op, mode)
5185     rtx op;
5186     enum machine_mode mode ATTRIBUTE_UNUSED;
5187{
5188  int count = XVECLEN (op, 0) - 1;
5189  unsigned int src_regno;
5190  rtx dest_addr;
5191  int i;
5192
5193  /* Perform a quick check so we don't blow up below.  */
5194  if (count <= 1
5195      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5196      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5197      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5198    return 0;
5199
5200  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5201  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5202
5203  for (i = 1; i < count; i++)
5204    {
5205      rtx elt = XVECEXP (op, 0, i + 1);
5206
5207      if (GET_CODE (elt) != SET
5208	  || GET_CODE (SET_SRC (elt)) != REG
5209	  || GET_MODE (SET_SRC (elt)) != SImode
5210	  || REGNO (SET_SRC (elt)) != src_regno + i
5211	  || GET_CODE (SET_DEST (elt)) != MEM
5212	  || GET_MODE (SET_DEST (elt)) != SImode
5213	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5214	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5215	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5216	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5217	return 0;
5218    }
5219
5220  return 1;
5221}
5222
5223/* Return 1 for a parallel vrsave operation.  */
5224
5225int
5226vrsave_operation (op, mode)
5227     rtx op;
5228     enum machine_mode mode ATTRIBUTE_UNUSED;
5229{
5230  int count = XVECLEN (op, 0);
5231  unsigned int dest_regno, src_regno;
5232  int i;
5233
5234  if (count <= 1
5235      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5236      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5237      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5238    return 0;
5239
5240  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5241  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5242
5243  if (dest_regno != VRSAVE_REGNO
5244      && src_regno != VRSAVE_REGNO)
5245    return 0;
5246
5247  for (i = 1; i < count; i++)
5248    {
5249      rtx elt = XVECEXP (op, 0, i);
5250
5251      if (GET_CODE (elt) != CLOBBER
5252	  && GET_CODE (elt) != SET)
5253	return 0;
5254    }
5255
5256  return 1;
5257}
5258
5259/* Return 1 for an PARALLEL suitable for mtcrf.  */
5260
5261int
5262mtcrf_operation (op, mode)
5263     rtx op;
5264     enum machine_mode mode ATTRIBUTE_UNUSED;
5265{
5266  int count = XVECLEN (op, 0);
5267  int i;
5268  rtx src_reg;
5269
5270  /* Perform a quick check so we don't blow up below.  */
5271  if (count < 1
5272      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5273      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5274      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5275    return 0;
5276  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5277
5278  if (GET_CODE (src_reg) != REG
5279      || GET_MODE (src_reg) != SImode
5280      || ! INT_REGNO_P (REGNO (src_reg)))
5281    return 0;
5282
5283  for (i = 0; i < count; i++)
5284    {
5285      rtx exp = XVECEXP (op, 0, i);
5286      rtx unspec;
5287      int maskval;
5288
5289      if (GET_CODE (exp) != SET
5290	  || GET_CODE (SET_DEST (exp)) != REG
5291	  || GET_MODE (SET_DEST (exp)) != CCmode
5292	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5293	return 0;
5294      unspec = SET_SRC (exp);
5295      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5296
5297      if (GET_CODE (unspec) != UNSPEC
5298	  || XINT (unspec, 1) != 20
5299	  || XVECLEN (unspec, 0) != 2
5300	  || XVECEXP (unspec, 0, 0) != src_reg
5301	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5302	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5303	return 0;
5304    }
5305  return 1;
5306}
5307
5308/* Return 1 for an PARALLEL suitable for lmw.  */
5309
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  /* The first element supplies the destination register and the base
     address; every other element is checked against these.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads registers dest_regno..31, so the vector length must be
     exactly 32 - dest_regno.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* The base address must be either (reg) or (plus (reg) (const_int)).
     Register 0 is rejected as an indirect base.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load SImode register dest_regno + i from the word
     at offset + 4 * i off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5385
/* Return 1 for a PARALLEL suitable for stmw.  */
5387
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  /* The first element supplies the source register and the base
     address; every other element is checked against these.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores registers src_regno..31, so the vector length must be
     exactly 32 - src_regno.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* The base address must be either (reg) or (plus (reg) (const_int)).
     Register 0 is rejected as an indirect base.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must store SImode register src_regno + i to the word at
     offset + 4 * i off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5463
5464/* A validation routine: say whether CODE, a condition code, and MODE
5465   match.  The other alternatives either don't make sense or should
5466   never be generated.  */
5467
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* CODE must be a comparison code and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense: signed comparison codes are meaningless
     with the unsigned CC mode...  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  /* ... and unsigned comparison codes require it.  */
  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* The ordered/unordered codes only exist for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_unsafe_math_optimizations.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
5507
5508/* Return 1 if OP is a comparison operation that is valid for a branch insn.
5509   We only check the opcode against the mode of the CC value here.  */
5510
5511int
5512branch_comparison_operator (op, mode)
5513     rtx op;
5514     enum machine_mode mode ATTRIBUTE_UNUSED;
5515{
5516  enum rtx_code code = GET_CODE (op);
5517  enum machine_mode cc_mode;
5518
5519  if (GET_RTX_CLASS (code) != '<')
5520    return 0;
5521
5522  cc_mode = GET_MODE (XEXP (op, 0));
5523  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5524    return 0;
5525
5526  validate_condition_mode (code, cc_mode);
5527
5528  return 1;
5529}
5530
5531/* Return 1 if OP is a comparison operation that is valid for a branch
5532   insn and which is true if the corresponding bit in the CC register
5533   is set.  */
5534
5535int
5536branch_positive_comparison_operator (op, mode)
5537     rtx op;
5538     enum machine_mode mode;
5539{
5540  enum rtx_code code;
5541
5542  if (! branch_comparison_operator (op, mode))
5543    return 0;
5544
5545  code = GET_CODE (op);
5546  return (code == EQ || code == LT || code == GT
5547	  || code == LTU || code == GTU
5548	  || code == UNORDERED);
5549}
5550
5551/* Return 1 if OP is a comparison operation that is valid for an scc insn.
5552   We check the opcode against the mode of the CC value and disallow EQ or
5553   NE comparisons for integers.  */
5554
5555int
5556scc_comparison_operator (op, mode)
5557     rtx op;
5558     enum machine_mode mode;
5559{
5560  enum rtx_code code = GET_CODE (op);
5561  enum machine_mode cc_mode;
5562
5563  if (GET_MODE (op) != mode && mode != VOIDmode)
5564    return 0;
5565
5566  if (GET_RTX_CLASS (code) != '<')
5567    return 0;
5568
5569  cc_mode = GET_MODE (XEXP (op, 0));
5570  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5571    return 0;
5572
5573  validate_condition_mode (code, cc_mode);
5574
5575  if (code == NE && cc_mode != CCFPmode)
5576    return 0;
5577
5578  return 1;
5579}
5580
5581int
5582trap_comparison_operator (op, mode)
5583    rtx op;
5584    enum machine_mode mode;
5585{
5586  if (mode != VOIDmode && mode != GET_MODE (op))
5587    return 0;
5588  return GET_RTX_CLASS (GET_CODE (op)) == '<';
5589}
5590
5591int
5592boolean_operator (op, mode)
5593    rtx op;
5594    enum machine_mode mode ATTRIBUTE_UNUSED;
5595{
5596  enum rtx_code code = GET_CODE (op);
5597  return (code == AND || code == IOR || code == XOR);
5598}
5599
5600int
5601boolean_or_operator (op, mode)
5602    rtx op;
5603    enum machine_mode mode ATTRIBUTE_UNUSED;
5604{
5605  enum rtx_code code = GET_CODE (op);
5606  return (code == IOR || code == XOR);
5607}
5608
5609int
5610min_max_operator (op, mode)
5611    rtx op;
5612    enum machine_mode mode ATTRIBUTE_UNUSED;
5613{
5614  enum rtx_code code = GET_CODE (op);
5615  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5616}
5617
5618/* Return 1 if ANDOP is a mask that has no bits on that are not in the
5619   mask required to convert the result of a rotate insn into a shift
5620   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
5621
5622int
5623includes_lshift_p (shiftop, andop)
5624     rtx shiftop;
5625     rtx andop;
5626{
5627  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5628
5629  shift_mask <<= INTVAL (shiftop);
5630
5631  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5632}
5633
5634/* Similar, but for right shift.  */
5635
5636int
5637includes_rshift_p (shiftop, andop)
5638     rtx shiftop;
5639     rtx andop;
5640{
5641  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5642
5643  shift_mask >>= INTVAL (shiftop);
5644
5645  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5646}
5647
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */
5651
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks are rejected outright.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a narrow host a 64-bit mask arrives as a CONST_DOUBLE; run
	 the same transition checks across the low/high halves.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* All-zero and all-one 64-bit masks are rejected outright.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      /* If the low word is entirely zero, the whole pattern lives in
	 the high word, and the shift must reach into it.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the mask's low transition must be in the low word,
	 coinciding with the LSB of the shift mask.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both halves and strip the low group of ones, as in the
	 CONST_INT case, then verify a single remaining transition.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      /* If inverting consumed the whole low word, the run of ones must
	 continue into (and end within) the high word.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5742
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
5746
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a narrow host a 64-bit mask arrives as a CONST_DOUBLE;
	 check the two halves separately.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  /* If the low word is all zeros, the mask lives entirely in
	     the high word.  */
	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Otherwise the high word must be all ones, so the run of
	     ones extends through the most significant bit.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      /* The transition must be covered by the shift mask; this also
	 rejects low == 0.  */
      if ((lsb & shift_mask_low) == 0)
	return 0;

      /* All ones above the transition, and not all ones overall.  */
      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5814
5815/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5816   for lfq and stfq insns.
5817
5818   Note reg1 and reg2 *must* be hard registers.  To be sure we will
5819   abort if we are passed pseudo registers.  */
5820
5821int
5822registers_ok_for_quad_peep (reg1, reg2)
5823     rtx reg1, reg2;
5824{
5825  /* We might have been passed a SUBREG.  */
5826  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5827    return 0;
5828
5829  return (REGNO (reg1) == REGNO (reg2) - 1);
5830}
5831
5832/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5833   addr1 and addr2 must be in consecutive memory locations
5834   (addr2 == addr1 + 8).  */
5835
5836int
5837addrs_ok_for_quad_peep (addr1, addr2)
5838     rtx addr1;
5839     rtx addr2;
5840{
5841  unsigned int reg1;
5842  int offset1;
5843
5844  /* Extract an offset (if used) from the first addr.  */
5845  if (GET_CODE (addr1) == PLUS)
5846    {
5847      /* If not a REG, return zero.  */
5848      if (GET_CODE (XEXP (addr1, 0)) != REG)
5849	return 0;
5850      else
5851	{
5852          reg1 = REGNO (XEXP (addr1, 0));
5853	  /* The offset must be constant!  */
5854	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5855            return 0;
5856          offset1 = INTVAL (XEXP (addr1, 1));
5857	}
5858    }
5859  else if (GET_CODE (addr1) != REG)
5860    return 0;
5861  else
5862    {
5863      reg1 = REGNO (addr1);
5864      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
5865      offset1 = 0;
5866    }
5867
5868  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
5869  if (GET_CODE (addr2) != PLUS)
5870    return 0;
5871
5872  if (GET_CODE (XEXP (addr2, 0)) != REG
5873      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5874    return 0;
5875
5876  if (reg1 != REGNO (XEXP (addr2, 0)))
5877    return 0;
5878
5879  /* The offset for the second addr must be 8 more than the first addr.  */
5880  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5881    return 0;
5882
5883  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
5884     instructions.  */
5885  return 1;
5886}
5887
5888/* Return the register class of a scratch register needed to copy IN into
5889   or out of a register in CLASS in MODE.  If it can be done directly,
5890   NO_REGS is returned.  */
5891
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or to -1 when it is not (or
     is not known to be) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo: see whether it has been assigned a hard reg.  */
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
5960
5961/* Given a comparison operation, return the bit number in CCR to test.  We
5962   know this is a valid comparison.
5963
5964   SCC_P is 1 if this is for an scc.  That means that %D will have been
5965   used instead of %C, so the bits will be in different places.
5966
5967   Return -1 if OP isn't a valid comparison for some reason.  */
5968
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* OP must be a comparison; anything else is not valid here.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must be against a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field is four bits wide; BASE_BIT is the first bit of the
     field being tested.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* For scc, %D will have emitted a crnor putting the complemented
	 bit in the unordered position (bit 3 of the field).  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
6021
6022/* Return the GOT register.  */
6023
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function uses the PIC offset table.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
6038
6039/* Functions to init, mark and free struct machine_function.
6040   These will be called, via pointer variables,
6041   from push_function_context and pop_function_context.  */
6042
static void
rs6000_init_machine_status (p)
     struct function *p;
{
  /* Allocate a zero-initialized per-function machine_function record;
     it is released by rs6000_free_machine_status.  */
  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
}
6049
6050static void
6051rs6000_free_machine_status (p)
6052     struct function *p;
6053{
6054  if (p->machine == NULL)
6055    return;
6056
6057  free (p->machine);
6058  p->machine = NULL;
6059}
6060
6061
6062/* Print an operand.  Recognize special options, documented below.  */
6063
6064#if TARGET_ELF
6065#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6066#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6067#else
6068#define SMALL_DATA_RELOC "sda21"
6069#define SMALL_DATA_REG 0
6070#endif
6071
6072void
6073print_operand (file, x, code)
6074    FILE *file;
6075    rtx x;
6076    int code;
6077{
6078  int i;
6079  HOST_WIDE_INT val;
6080
6081  /* These macros test for integers and extract the low-order bits.  */
6082#define INT_P(X)  \
6083((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
6084 && GET_MODE (X) == VOIDmode)
6085
6086#define INT_LOWPART(X) \
6087  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6088
6089  switch (code)
6090    {
6091    case '.':
6092      /* Write out an instruction after the call which may be replaced
6093	 with glue code by the loader.  This depends on the AIX version.  */
6094      asm_fprintf (file, RS6000_CALL_GLUE);
6095      return;
6096
6097      /* %a is output_address.  */
6098
6099    case 'A':
6100      /* If X is a constant integer whose low-order 5 bits are zero,
6101	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
6102	 in the AIX assembler where "sri" with a zero shift count
6103	 writes a trash instruction.  */
6104      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6105	putc ('l', file);
6106      else
6107	putc ('r', file);
6108      return;
6109
6110    case 'b':
6111      /* If constant, low-order 16 bits of constant, unsigned.
6112	 Otherwise, write normally.  */
6113      if (INT_P (x))
6114	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6115      else
6116	print_operand (file, x, 0);
6117      return;
6118
6119    case 'B':
6120      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6121	 for 64-bit mask direction.  */
6122      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6123      return;
6124
6125      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6126	 output_operand.  */
6127
6128    case 'D':
6129      /* There used to be a comment for 'C' reading "This is an
6130	   optional cror needed for certain floating-point
6131	   comparisons.  Otherwise write nothing."  */
6132
6133      /* Similar, except that this is for an scc, so we must be able to
6134	 encode the test in a single bit that is one.  We do the above
6135	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
6136      if (GET_CODE (x) == LE || GET_CODE (x) == GE
6137	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6138	{
6139	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6140
6141	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6142		   base_bit + 2,
6143		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6144	}
6145
6146      else if (GET_CODE (x) == NE)
6147	{
6148	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6149
6150	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6151		   base_bit + 2, base_bit + 2);
6152	}
6153      return;
6154
6155    case 'E':
6156      /* X is a CR register.  Print the number of the EQ bit of the CR */
6157      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6158	output_operand_lossage ("invalid %%E value");
6159      else
6160	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6161      return;
6162
6163    case 'f':
6164      /* X is a CR register.  Print the shift count needed to move it
6165	 to the high-order four bits.  */
6166      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6167	output_operand_lossage ("invalid %%f value");
6168      else
6169	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6170      return;
6171
6172    case 'F':
6173      /* Similar, but print the count for the rotate in the opposite
6174	 direction.  */
6175      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6176	output_operand_lossage ("invalid %%F value");
6177      else
6178	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6179      return;
6180
6181    case 'G':
6182      /* X is a constant integer.  If it is negative, print "m",
6183	 otherwise print "z".  This is to make a aze or ame insn.  */
6184      if (GET_CODE (x) != CONST_INT)
6185	output_operand_lossage ("invalid %%G value");
6186      else if (INTVAL (x) >= 0)
6187	putc ('z', file);
6188      else
6189	putc ('m', file);
6190      return;
6191
6192    case 'h':
6193      /* If constant, output low-order five bits.  Otherwise, write
6194	 normally.  */
6195      if (INT_P (x))
6196	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6197      else
6198	print_operand (file, x, 0);
6199      return;
6200
6201    case 'H':
6202      /* If constant, output low-order six bits.  Otherwise, write
6203	 normally.  */
6204      if (INT_P (x))
6205	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6206      else
6207	print_operand (file, x, 0);
6208      return;
6209
6210    case 'I':
6211      /* Print `i' if this is a constant, else nothing.  */
6212      if (INT_P (x))
6213	putc ('i', file);
6214      return;
6215
6216    case 'j':
6217      /* Write the bit number in CCR for jump.  */
6218      i = ccr_bit (x, 0);
6219      if (i == -1)
6220	output_operand_lossage ("invalid %%j code");
6221      else
6222	fprintf (file, "%d", i);
6223      return;
6224
6225    case 'J':
6226      /* Similar, but add one for shift count in rlinm for scc and pass
6227	 scc flag to `ccr_bit'.  */
6228      i = ccr_bit (x, 1);
6229      if (i == -1)
6230	output_operand_lossage ("invalid %%J code");
6231      else
6232	/* If we want bit 31, write a shift count of zero, not 32.  */
6233	fprintf (file, "%d", i == 31 ? 0 : i + 1);
6234      return;
6235
6236    case 'k':
6237      /* X must be a constant.  Write the 1's complement of the
6238	 constant.  */
6239      if (! INT_P (x))
6240	output_operand_lossage ("invalid %%k value");
6241      else
6242	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6243      return;
6244
6245    case 'K':
6246      /* X must be a symbolic constant on ELF.  Write an
6247	 expression suitable for an 'addi' that adds in the low 16
6248	 bits of the MEM.  */
6249      if (GET_CODE (x) != CONST)
6250	{
6251	  print_operand_address (file, x);
6252	  fputs ("@l", file);
6253	}
6254      else
6255	{
6256	  if (GET_CODE (XEXP (x, 0)) != PLUS
6257	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6258		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6259	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6260	    output_operand_lossage ("invalid %%K value");
6261	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
6262	  fputs ("@l", file);
6263	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6264	}
6265      return;
6266
6267      /* %l is output_asm_label.  */
6268
6269    case 'L':
6270      /* Write second word of DImode or DFmode reference.  Works on register
6271	 or non-indexed memory only.  */
6272      if (GET_CODE (x) == REG)
6273	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6274      else if (GET_CODE (x) == MEM)
6275	{
6276	  /* Handle possible auto-increment.  Since it is pre-increment and
6277	     we have already done it, we can just use an offset of word.  */
6278	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6279	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6280	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6281					   UNITS_PER_WORD));
6282	  else
6283	    output_address (XEXP (adjust_address_nv (x, SImode,
6284						     UNITS_PER_WORD),
6285				  0));
6286
6287	  if (small_data_operand (x, GET_MODE (x)))
6288	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6289		     reg_names[SMALL_DATA_REG]);
6290	}
6291      return;
6292
6293    case 'm':
6294      /* MB value for a mask operand.  */
6295      if (! mask_operand (x, SImode))
6296	output_operand_lossage ("invalid %%m value");
6297
6298      val = INT_LOWPART (x);
6299
6300      /* If the high bit is set and the low bit is not, the value is zero.
6301	 If the high bit is zero, the value is the first 1 bit we find from
6302	 the left.  */
6303      if ((val & 0x80000000) && ((val & 1) == 0))
6304	{
6305	  putc ('0', file);
6306	  return;
6307	}
6308      else if ((val & 0x80000000) == 0)
6309	{
6310	  for (i = 1; i < 32; i++)
6311	    if ((val <<= 1) & 0x80000000)
6312	      break;
6313	  fprintf (file, "%d", i);
6314	  return;
6315	}
6316
6317      /* Otherwise, look for the first 0 bit from the right.  The result is its
6318	 number plus 1. We know the low-order bit is one.  */
6319      for (i = 0; i < 32; i++)
6320	if (((val >>= 1) & 1) == 0)
6321	  break;
6322
6323      /* If we ended in ...01, i would be 0.  The correct value is 31, so
6324	 we want 31 - i.  */
6325      fprintf (file, "%d", 31 - i);
6326      return;
6327
6328    case 'M':
6329      /* ME value for a mask operand.  */
6330      if (! mask_operand (x, SImode))
6331	output_operand_lossage ("invalid %%M value");
6332
6333      val = INT_LOWPART (x);
6334
6335      /* If the low bit is set and the high bit is not, the value is 31.
6336	 If the low bit is zero, the value is the first 1 bit we find from
6337	 the right.  */
6338      if ((val & 1) && ((val & 0x80000000) == 0))
6339	{
6340	  fputs ("31", file);
6341	  return;
6342	}
6343      else if ((val & 1) == 0)
6344	{
6345	  for (i = 0; i < 32; i++)
6346	    if ((val >>= 1) & 1)
6347	      break;
6348
6349	  /* If we had ....10, i would be 0.  The result should be
6350	     30, so we need 30 - i.  */
6351	  fprintf (file, "%d", 30 - i);
6352	  return;
6353	}
6354
6355      /* Otherwise, look for the first 0 bit from the left.  The result is its
6356	 number minus 1. We know the high-order bit is one.  */
6357      for (i = 0; i < 32; i++)
6358	if (((val <<= 1) & 0x80000000) == 0)
6359	  break;
6360
6361      fprintf (file, "%d", i);
6362      return;
6363
6364      /* %n outputs the negative of its operand.  */
6365
6366    case 'N':
6367      /* Write the number of elements in the vector times 4.  */
6368      if (GET_CODE (x) != PARALLEL)
6369	output_operand_lossage ("invalid %%N value");
6370      else
6371	fprintf (file, "%d", XVECLEN (x, 0) * 4);
6372      return;
6373
6374    case 'O':
6375      /* Similar, but subtract 1 first.  */
6376      if (GET_CODE (x) != PARALLEL)
6377	output_operand_lossage ("invalid %%O value");
6378      else
6379	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6380      return;
6381
6382    case 'p':
6383      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
6384      if (! INT_P (x)
6385	  || INT_LOWPART (x) < 0
6386	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
6387	output_operand_lossage ("invalid %%p value");
6388      else
6389	fprintf (file, "%d", i);
6390      return;
6391
6392    case 'P':
6393      /* The operand must be an indirect memory reference.  The result
6394	 is the register number.  */
6395      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6396	  || REGNO (XEXP (x, 0)) >= 32)
6397	output_operand_lossage ("invalid %%P value");
6398      else
6399	fprintf (file, "%d", REGNO (XEXP (x, 0)));
6400      return;
6401
6402    case 'q':
6403      /* This outputs the logical code corresponding to a boolean
6404	 expression.  The expression may have one or both operands
6405	 negated (if one, only the first one).  For condition register
6406         logical operations, it will also treat the negated
6407         CR codes as NOTs, but not handle NOTs of them.  */
6408      {
6409	const char *const *t = 0;
6410	const char *s;
6411	enum rtx_code code = GET_CODE (x);
6412	static const char * const tbl[3][3] = {
6413	  { "and", "andc", "nor" },
6414	  { "or", "orc", "nand" },
6415	  { "xor", "eqv", "xor" } };
6416
6417	if (code == AND)
6418	  t = tbl[0];
6419	else if (code == IOR)
6420	  t = tbl[1];
6421	else if (code == XOR)
6422	  t = tbl[2];
6423	else
6424	  output_operand_lossage ("invalid %%q value");
6425
6426	if (GET_CODE (XEXP (x, 0)) != NOT)
6427	  s = t[0];
6428	else
6429	  {
6430	    if (GET_CODE (XEXP (x, 1)) == NOT)
6431	      s = t[2];
6432	    else
6433	      s = t[1];
6434	  }
6435
6436	fputs (s, file);
6437      }
6438      return;
6439
6440    case 'R':
6441      /* X is a CR register.  Print the mask for `mtcrf'.  */
6442      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6443	output_operand_lossage ("invalid %%R value");
6444      else
6445	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6446      return;
6447
6448    case 's':
6449      /* Low 5 bits of 32 - value */
6450      if (! INT_P (x))
6451	output_operand_lossage ("invalid %%s value");
6452      else
6453	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6454      return;
6455
6456    case 'S':
6457      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
6458	 CONST_INT 32-bit mask is considered sign-extended so any
6459	 transition must occur within the CONST_INT, not on the boundary.  */
6460      if (! mask64_operand (x, DImode))
6461	output_operand_lossage ("invalid %%S value");
6462
6463      val = INT_LOWPART (x);
6464
6465      if (val & 1)      /* Clear Left */
6466	{
6467	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6468	    if (!((val >>= 1) & 1))
6469	      break;
6470
6471#if HOST_BITS_PER_WIDE_INT == 32
6472	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6473	    {
6474	      val = CONST_DOUBLE_HIGH (x);
6475
6476	      if (val == 0)
6477		--i;
6478	      else
6479		for (i = 32; i < 64; i++)
6480		  if (!((val >>= 1) & 1))
6481		    break;
6482	    }
6483#endif
6484	/* i = index of last set bit from right
6485	   mask begins at 63 - i from left */
6486	  if (i > 63)
6487	    output_operand_lossage ("%%S computed all 1's mask");
6488
6489	  fprintf (file, "%d", 63 - i);
6490	  return;
6491	}
6492      else	/* Clear Right */
6493	{
6494	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6495	    if ((val >>= 1) & 1)
6496	      break;
6497
6498#if HOST_BITS_PER_WIDE_INT == 32
6499	if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6500	  {
6501	    val = CONST_DOUBLE_HIGH (x);
6502
6503	    if (val == (HOST_WIDE_INT) -1)
6504	      --i;
6505	    else
6506	      for (i = 32; i < 64; i++)
6507		if ((val >>= 1) & 1)
6508		  break;
6509	  }
6510#endif
6511	/* i = index of last clear bit from right
6512	   mask ends at 62 - i from left */
6513	  if (i > 62)
6514	    output_operand_lossage ("%%S computed all 0's mask");
6515
6516	  fprintf (file, "%d", 62 - i);
6517	  return;
6518	}
6519
6520    case 'T':
6521      /* Print the symbolic name of a branch target register.  */
6522      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6523				  && REGNO (x) != COUNT_REGISTER_REGNUM))
6524	output_operand_lossage ("invalid %%T value");
6525      else if (REGNO (x) == LINK_REGISTER_REGNUM)
6526	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6527      else
6528	fputs ("ctr", file);
6529      return;
6530
6531    case 'u':
6532      /* High-order 16 bits of constant for use in unsigned operand.  */
6533      if (! INT_P (x))
6534	output_operand_lossage ("invalid %%u value");
6535      else
6536	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6537		 (INT_LOWPART (x) >> 16) & 0xffff);
6538      return;
6539
6540    case 'v':
6541      /* High-order 16 bits of constant for use in signed operand.  */
6542      if (! INT_P (x))
6543	output_operand_lossage ("invalid %%v value");
6544      else
6545	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6546		 (INT_LOWPART (x) >> 16) & 0xffff);
6547      return;
6548
6549    case 'U':
6550      /* Print `u' if this has an auto-increment or auto-decrement.  */
6551      if (GET_CODE (x) == MEM
6552	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
6553	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6554	putc ('u', file);
6555      return;
6556
6557    case 'V':
6558      /* Print the trap code for this operand.  */
6559      switch (GET_CODE (x))
6560	{
6561	case EQ:
6562	  fputs ("eq", file);   /* 4 */
6563	  break;
6564	case NE:
6565	  fputs ("ne", file);   /* 24 */
6566	  break;
6567	case LT:
6568	  fputs ("lt", file);   /* 16 */
6569	  break;
6570	case LE:
6571	  fputs ("le", file);   /* 20 */
6572	  break;
6573	case GT:
6574	  fputs ("gt", file);   /* 8 */
6575	  break;
6576	case GE:
6577	  fputs ("ge", file);   /* 12 */
6578	  break;
6579	case LTU:
6580	  fputs ("llt", file);  /* 2 */
6581	  break;
6582	case LEU:
6583	  fputs ("lle", file);  /* 6 */
6584	  break;
6585	case GTU:
6586	  fputs ("lgt", file);  /* 1 */
6587	  break;
6588	case GEU:
6589	  fputs ("lge", file);  /* 5 */
6590	  break;
6591	default:
6592	  abort ();
6593	}
6594      break;
6595
6596    case 'w':
6597      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
6598	 normally.  */
6599      if (INT_P (x))
6600	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6601		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6602      else
6603	print_operand (file, x, 0);
6604      return;
6605
6606    case 'W':
6607      /* MB value for a PowerPC64 rldic operand.  */
6608      val = (GET_CODE (x) == CONST_INT
6609	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6610
6611      if (val < 0)
6612	i = -1;
6613      else
6614	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6615	  if ((val <<= 1) < 0)
6616	    break;
6617
6618#if HOST_BITS_PER_WIDE_INT == 32
6619      if (GET_CODE (x) == CONST_INT && i >= 0)
6620	i += 32;  /* zero-extend high-part was all 0's */
6621      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6622	{
6623	  val = CONST_DOUBLE_LOW (x);
6624
6625	  if (val == 0)
6626	    abort ();
6627	  else if (val < 0)
6628	    --i;
6629	  else
6630	    for ( ; i < 64; i++)
6631	      if ((val <<= 1) < 0)
6632		break;
6633	}
6634#endif
6635
6636      fprintf (file, "%d", i + 1);
6637      return;
6638
6639    case 'X':
6640      if (GET_CODE (x) == MEM
6641	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6642	putc ('x', file);
6643      return;
6644
6645    case 'Y':
6646      /* Like 'L', for third word of TImode  */
6647      if (GET_CODE (x) == REG)
6648	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6649      else if (GET_CODE (x) == MEM)
6650	{
6651	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6652	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6653	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6654	  else
6655	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6656	  if (small_data_operand (x, GET_MODE (x)))
6657	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6658		     reg_names[SMALL_DATA_REG]);
6659	}
6660      return;
6661
6662    case 'z':
6663      /* X is a SYMBOL_REF.  Write out the name preceded by a
6664	 period and without any trailing data in brackets.  Used for function
6665	 names.  If we are configured for System V (or the embedded ABI) on
6666	 the PowerPC, do not emit the period, since those systems do not use
6667	 TOCs and the like.  */
6668      if (GET_CODE (x) != SYMBOL_REF)
6669	abort ();
6670
6671      if (XSTR (x, 0)[0] != '.')
6672	{
6673	  switch (DEFAULT_ABI)
6674	    {
6675	    default:
6676	      abort ();
6677
6678	    case ABI_AIX:
6679	      putc ('.', file);
6680	      break;
6681
6682	    case ABI_V4:
6683	    case ABI_AIX_NODESC:
6684	    case ABI_DARWIN:
6685	      break;
6686	    }
6687	}
6688#if TARGET_AIX
6689      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6690#else
6691      assemble_name (file, XSTR (x, 0));
6692#endif
6693      return;
6694
6695    case 'Z':
6696      /* Like 'L', for last word of TImode.  */
6697      if (GET_CODE (x) == REG)
6698	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6699      else if (GET_CODE (x) == MEM)
6700	{
6701	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6702	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6703	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6704	  else
6705	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6706	  if (small_data_operand (x, GET_MODE (x)))
6707	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6708		     reg_names[SMALL_DATA_REG]);
6709	}
6710      return;
6711
6712      /* Print AltiVec memory operand.  */
6713    case 'y':
6714      {
6715	rtx tmp;
6716
6717	if (GET_CODE (x) != MEM)
6718	  abort ();
6719
6720	tmp = XEXP (x, 0);
6721
6722	if (GET_CODE (tmp) == REG)
6723	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6724	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6725	  {
6726	    if (REGNO (XEXP (tmp, 0)) == 0)
6727	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6728		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
6729	    else
6730	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6731		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
6732	  }
6733	else
6734	  abort ();
6735	break;
6736      }
6737
6738    case 0:
6739      if (GET_CODE (x) == REG)
6740	fprintf (file, "%s", reg_names[REGNO (x)]);
6741      else if (GET_CODE (x) == MEM)
6742	{
6743	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
6744	     know the width from the mode.  */
6745	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6746	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6747		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6748	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6749	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6750		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6751	  else
6752	    output_address (XEXP (x, 0));
6753	}
6754      else
6755	output_addr_const (file, x);
6756      return;
6757
6758    default:
6759      output_operand_lossage ("invalid %%xn code");
6760    }
6761}
6762
6763/* Print the address of an operand.  */
6764
void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  /* Dispatch on the form of the address rtx X and print it to FILE
     in assembler syntax.  */
  if (GET_CODE (x) == REG)
    /* Bare register: print as a zero-displacement form, "0(rN)".  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      /* Symbolic address: print it directly; small-data operands get
	 the small-data relocation suffix and base register appended.  */
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* A bare symbolic address should never reach here when a TOC
	 is in use; such references must go through the TOC.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Register+register (indexed) address: print "rA,rB".  If the
	 first register is r0 the operands are swapped, because r0 in
	 the base slot of an indexed form means literal zero.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Register+constant displacement address: print "D(rN)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part address: "sym@l(rN)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O low-part address: "lo16(sym)(rN)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, also decorate the symbol with "@toc" while it
		 is being printed; the original name is restored below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the in-place rtl edit made above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6852
6853/* Target hook for assembling integer objects.  The powerpc version has
6854   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6855   is defined.  It also needs to handle DI-mode objects on 64-bit
6856   targets.  */
6857
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard so that the output_addr_const/assemble_name calls below
	 do not re-enter this special-case path.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  /* Emit the word under a local "LCP" label, then record that
	     label's address in the .fixup section so startup code can
	     relocate the word at run time.  */
	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic treatment.  */
  return default_assemble_integer (x, size, aligned_p);
}
6916
6917enum rtx_code
6918rs6000_reverse_condition (mode, code)
6919     enum machine_mode mode;
6920     enum rtx_code code;
6921{
6922  /* Reversal of FP compares takes care -- an ordered compare
6923     becomes an unordered compare and vice versa.  */
6924  if (mode == CCFPmode && !flag_unsafe_math_optimizations)
6925    return reverse_condition_maybe_unordered (code);
6926  else
6927    return reverse_condition (code);
6928}
6929
6930/* Generate a compare for CODE.  Return a brand-new rtx that
6931   represents the result of the compare.  */
6932
static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Select the condition-register mode: CCFPmode for floating-point
     compares (the operands are taken from the global
     rs6000_compare_op0/op1, with rs6000_compare_fp_p describing
     them), CCUNSmode for unsigned integer compares, plain CCmode
     otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	  || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);
  emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			  gen_rtx_COMPARE (comp_mode,
					   rs6000_compare_op0,
					   rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      /* These codes each decompose into a disjunction of two simpler
	 tests (e.g. LE is LT || EQ); combine the two single-bit tests
	 into a CCEQmode register and test that for equality instead.  */
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* The caller now tests the ORed result for equality.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  /* Return a fresh (code cc-reg 0) rtx describing the comparison.  */
  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
6994
6995
6996/* Emit the RTL for an sCOND pattern.  */
6997
6998void
6999rs6000_emit_sCOND (code, result)
7000     enum rtx_code code;
7001     rtx result;
7002{
7003  rtx condition_rtx;
7004  enum machine_mode op_mode;
7005
7006  condition_rtx = rs6000_generate_compare (code);
7007
7008  op_mode = GET_MODE (rs6000_compare_op0);
7009  if (op_mode == VOIDmode)
7010    op_mode = GET_MODE (rs6000_compare_op1);
7011
7012  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7013    {
7014      PUT_MODE (condition_rtx, DImode);
7015      convert_move (result, condition_rtx, 0);
7016    }
7017  else
7018    {
7019      PUT_MODE (condition_rtx, SImode);
7020      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7021    }
7022}
7023
7024/* Emit a branch of kind CODE to location LOC.  */
7025
7026void
7027rs6000_emit_cbranch (code, loc)
7028     enum rtx_code code;
7029     rtx loc;
7030{
7031  rtx condition_rtx, loc_ref;
7032
7033  condition_rtx = rs6000_generate_compare (code);
7034  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7035  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7036			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7037						     loc_ref, pc_rtx)));
7038}
7039
7040/* Return the string to output a conditional branch to LABEL, which is
7041   the operand number of the label, or -1 if the branch is really a
7042   conditional return.
7043
7044   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
7045   condition code register and its mode specifies what kind of
7046   comparison we made.
7047
7048   REVERSED is non-zero if we should reverse the sense of the comparison.
7049
7050   INSN is the insn.  */
7051
char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE: the result is built in a static buffer, so a second call
     overwrites the string returned by the first.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* An insn length of 8 means the target is beyond conditional-branch
     range, so we emit an inverted branch around an unconditional one.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  /* Map the rtx comparison code onto the assembler condition suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* For branches that are very close to 50%, assume not-taken.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* Emit either a conditional return (label == NULL) or a conditional
     branch, in {old|new} mnemonic syntax with the prediction hint.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
7151
7152/* Emit a conditional move: move TRUE_COND to DEST if OP of the
7153   operands of the last comparison is nonzero/true, FALSE_COND if it
7154   is zero/false.  Return 0 if the hardware has no such operation.  */
7155
7156int
7157rs6000_emit_cmove (dest, op, true_cond, false_cond)
7158     rtx dest;
7159     rtx op;
7160     rtx true_cond;
7161     rtx false_cond;
7162{
7163  enum rtx_code code = GET_CODE (op);
7164  rtx op0 = rs6000_compare_op0;
7165  rtx op1 = rs6000_compare_op1;
7166  REAL_VALUE_TYPE c1;
7167  enum machine_mode compare_mode = GET_MODE (op0);
7168  enum machine_mode result_mode = GET_MODE (dest);
7169  rtx temp;
7170
7171  /* These modes should always match. */
7172  if (GET_MODE (op1) != compare_mode)
7173    return 0;
7174  if (GET_MODE (true_cond) != result_mode)
7175    return 0;
7176  if (GET_MODE (false_cond) != result_mode)
7177    return 0;
7178
7179  /* First, work out if the hardware can do this at all, or
7180     if it's too slow...  */
7181  /* If the comparison is an integer one, since we only have fsel
7182     it'll be cheaper to use a branch.  */
7183  if (! rs6000_compare_fp_p)
7184    return 0;
7185
7186  /* Eliminate half of the comparisons by switching operands, this
7187     makes the remaining code simpler.  */
7188  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7189      || code == LTGT || code == LT)
7190    {
7191      code = reverse_condition_maybe_unordered (code);
7192      temp = true_cond;
7193      true_cond = false_cond;
7194      false_cond = temp;
7195    }
7196
7197  /* UNEQ and LTGT take four instructions for a comparison with zero,
7198     it'll probably be faster to use a branch here too.  */
7199  if (code == UNEQ)
7200    return 0;
7201
7202  if (GET_CODE (op1) == CONST_DOUBLE)
7203    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7204
7205  /* We're going to try to implement comparions by performing
7206     a subtract, then comparing against zero.  Unfortunately,
7207     Inf - Inf is NaN which is not zero, and so if we don't
7208     know that the the operand is finite and the comparison
7209     would treat EQ different to UNORDERED, we can't do it.  */
7210  if (! flag_unsafe_math_optimizations
7211      && code != GT && code != UNGE
7212      && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7213      /* Constructs of the form (a OP b ? a : b) are safe.  */
7214      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7215	  || (! rtx_equal_p (op0, true_cond)
7216	      && ! rtx_equal_p (op1, true_cond))))
7217    return 0;
7218  /* At this point we know we can use fsel.  */
7219
7220  /* Reduce the comparison to a comparison against zero.  */
7221  temp = gen_reg_rtx (compare_mode);
7222  emit_insn (gen_rtx_SET (VOIDmode, temp,
7223			  gen_rtx_MINUS (compare_mode, op0, op1)));
7224  op0 = temp;
7225  op1 = CONST0_RTX (compare_mode);
7226
7227  /* If we don't care about NaNs we can reduce some of the comparisons
7228     down to faster ones.  */
7229  if (flag_unsafe_math_optimizations)
7230    switch (code)
7231      {
7232      case GT:
7233	code = LE;
7234	temp = true_cond;
7235	true_cond = false_cond;
7236	false_cond = temp;
7237	break;
7238      case UNGE:
7239	code = GE;
7240	break;
7241      case UNEQ:
7242	code = EQ;
7243	break;
7244      default:
7245	break;
7246      }
7247
7248  /* Now, reduce everything down to a GE.  */
7249  switch (code)
7250    {
7251    case GE:
7252      break;
7253
7254    case LE:
7255      temp = gen_reg_rtx (compare_mode);
7256      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7257      op0 = temp;
7258      break;
7259
7260    case ORDERED:
7261      temp = gen_reg_rtx (compare_mode);
7262      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7263      op0 = temp;
7264      break;
7265
7266    case EQ:
7267      temp = gen_reg_rtx (compare_mode);
7268      emit_insn (gen_rtx_SET (VOIDmode, temp,
7269			      gen_rtx_NEG (compare_mode,
7270					   gen_rtx_ABS (compare_mode, op0))));
7271      op0 = temp;
7272      break;
7273
7274    case UNGE:
7275      temp = gen_reg_rtx (result_mode);
7276      emit_insn (gen_rtx_SET (VOIDmode, temp,
7277			      gen_rtx_IF_THEN_ELSE (result_mode,
7278						    gen_rtx_GE (VOIDmode,
7279								op0, op1),
7280						    true_cond, false_cond)));
7281      false_cond = temp;
7282      true_cond = false_cond;
7283
7284      temp = gen_reg_rtx (compare_mode);
7285      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7286      op0 = temp;
7287      break;
7288
7289    case GT:
7290      temp = gen_reg_rtx (result_mode);
7291      emit_insn (gen_rtx_SET (VOIDmode, temp,
7292			      gen_rtx_IF_THEN_ELSE (result_mode,
7293						    gen_rtx_GE (VOIDmode,
7294								op0, op1),
7295						    true_cond, false_cond)));
7296      true_cond = temp;
7297      false_cond = true_cond;
7298
7299      temp = gen_reg_rtx (compare_mode);
7300      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7301      op0 = temp;
7302      break;
7303
7304    default:
7305      abort ();
7306    }
7307
7308  emit_insn (gen_rtx_SET (VOIDmode, dest,
7309			  gen_rtx_IF_THEN_ELSE (result_mode,
7310						gen_rtx_GE (VOIDmode,
7311							    op0, op1),
7312						true_cond, false_cond)));
7313  return 1;
7314}
7315
7316void
7317rs6000_emit_minmax (dest, code, op0, op1)
7318     rtx dest;
7319     enum rtx_code code;
7320     rtx op0;
7321     rtx op1;
7322{
7323  enum machine_mode mode = GET_MODE (op0);
7324  rtx target;
7325  if (code == SMAX || code == UMAX)
7326    target = emit_conditional_move (dest, GE, op0, op1, mode,
7327				    op0, op1, mode, 0);
7328  else
7329    target = emit_conditional_move (dest, GE, op0, op1, mode,
7330				    op1, op0, mode, 0);
7331  if (target == NULL_RTX)
7332    abort ();
7333  if (target != dest)
7334    emit_move_insn (dest, target);
7335}
7336
7337/* This page contains routines that are used to determine what the
7338   function prologue and epilogue code will do and write them out.  */
7339
7340/* Return the first fixed-point register that is required to be
7341   saved. 32 if none.  */
7342
7343int
7344first_reg_to_save ()
7345{
7346  int first_reg;
7347
7348  /* Find lowest numbered live register.  */
7349  for (first_reg = 13; first_reg <= 31; first_reg++)
7350    if (regs_ever_live[first_reg]
7351	&& (! call_used_regs[first_reg]
7352	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7353		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7354		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7355      break;
7356
7357#if TARGET_MACHO
7358  if (flag_pic && current_function_uses_pic_offset_table &&
7359      (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7360    return RS6000_PIC_OFFSET_TABLE_REGNUM;
7361#endif
7362
7363  return first_reg;
7364}
7365
7366/* Similar, for FP regs.  */
7367
7368int
7369first_fp_reg_to_save ()
7370{
7371  int first_reg;
7372
7373  /* Find lowest numbered live register.  */
7374  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7375    if (regs_ever_live[first_reg])
7376      break;
7377
7378  return first_reg;
7379}
7380
7381/* Similar, for AltiVec regs.  */
7382
7383static int
7384first_altivec_reg_to_save ()
7385{
7386  int i;
7387
7388  /* Stack frame remains as is unless we are in AltiVec ABI.  */
7389  if (! TARGET_ALTIVEC_ABI)
7390    return LAST_ALTIVEC_REGNO + 1;
7391
7392  /* Find lowest numbered live register.  */
7393  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7394    if (regs_ever_live[i])
7395      break;
7396
7397  return i;
7398}
7399
7400/* Return a 32-bit mask of the AltiVec registers we need to set in
7401   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
7402   the 32-bit word is 0.  */
7403
static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): the loop starts at args_info.vregno itself; if
     vregno is the next *unused* vector argument register, one extra
     register gets cleared from the mask -- confirm against the
     argument-passing code.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
7448
7449static void
7450is_altivec_return_reg (reg, xyes)
7451     rtx reg;
7452     void *xyes;
7453{
7454  bool *yes = (bool *) xyes;
7455  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7456    *yes = true;
7457}
7458
7459
7460/* Calculate the stack information for the current function.  This is
7461   complicated by having two separate calling sequences, the AIX calling
7462   sequence and the V.4 calling sequence.
7463
7464   AIX (and Darwin/Mac OS X) stack frames look like:
7465							  32-bit  64-bit
7466	SP---->	+---------------------------------------+
7467		| back chain to caller			| 0	  0
7468		+---------------------------------------+
7469		| saved CR				| 4       8 (8-11)
7470		+---------------------------------------+
7471		| saved LR				| 8       16
7472		+---------------------------------------+
7473		| reserved for compilers		| 12      24
7474		+---------------------------------------+
7475		| reserved for binders			| 16      32
7476		+---------------------------------------+
7477		| saved TOC pointer			| 20      40
7478		+---------------------------------------+
7479		| Parameter save area (P)		| 24      48
7480		+---------------------------------------+
7481		| Alloca space (A)			| 24+P    etc.
7482		+---------------------------------------+
7483		| Local variable space (L)		| 24+P+A
7484		+---------------------------------------+
7485		| Float/int conversion temporary (X)	| 24+P+A+L
7486		+---------------------------------------+
7487		| Save area for AltiVec registers (W)	| 24+P+A+L+X
7488		+---------------------------------------+
7489		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
7490		+---------------------------------------+
7491		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
7492		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
7496		+---------------------------------------+
7497	old SP->| back chain to caller's caller		|
7498		+---------------------------------------+
7499
7500   The required alignment for AIX configurations is two words (i.e., 8
7501   or 16 bytes).
7502
7503
7504   V.4 stack frames look like:
7505
7506	SP---->	+---------------------------------------+
7507		| back chain to caller			| 0
7508		+---------------------------------------+
7509		| caller's saved LR			| 4
7510		+---------------------------------------+
7511		| Parameter save area (P)		| 8
7512		+---------------------------------------+
7513		| Alloca space (A)			| 8+P
7514		+---------------------------------------+
7515		| Varargs save area (V)			| 8+P+A
7516		+---------------------------------------+
7517		| Local variable space (L)		| 8+P+A+V
7518		+---------------------------------------+
7519		| Float/int conversion temporary (X)	| 8+P+A+V+L
7520		+---------------------------------------+
7521		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
7522		+---------------------------------------+
7523		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
7524		+---------------------------------------+
7525		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
7526		+---------------------------------------+
7527		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
7528		+---------------------------------------+
7529		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
7530		+---------------------------------------+
7531		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
7532		+---------------------------------------+
7533	old SP->| back chain to caller's caller		|
7534		+---------------------------------------+
7535
7536   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7537   given.  (But note below and in sysv4.h that we require only 8 and
7538   may round up the size of our stack frame anyways.  The historical
7539   reason is early versions of powerpc-linux which didn't properly
7540   align the stack at program startup.  A happy side-effect is that
7541   -mno-eabi libraries can be used with -meabi programs.)
7542
7543   The EABI configuration defaults to the V.4 layout, unless
7544   -mcall-aix is used, in which case the AIX layout is used.  However,
7545   the stack alignment requirements may differ.  If -mno-eabi is not
7546   given, the required stack alignment is 8 bytes; if -mno-eabi is
7547   given, the required alignment is 16 bytes.  (But see V.4 comment
7548   above.)  */
7549
7550#ifndef ABI_STACK_BOUNDARY
7551#define ABI_STACK_BOUNDARY STACK_BOUNDARY
7552#endif
7553
7554rs6000_stack_t *
7555rs6000_stack_info ()
7556{
7557  static rs6000_stack_t info, zero_info;
7558  rs6000_stack_t *info_ptr = &info;
7559  int reg_size = TARGET_POWERPC64 ? 8 : 4;
7560  enum rs6000_abi abi;
7561  int ehrd_size;
7562  int total_raw_size;
7563
7564  /* Zero all fields portably.  */
7565  info = zero_info;
7566
7567  /* Select which calling sequence.  */
7568  info_ptr->abi = abi = DEFAULT_ABI;
7569
7570  /* Calculate which registers need to be saved & save area size.  */
7571  info_ptr->first_gp_reg_save = first_reg_to_save ();
7572  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7573     even if it currently looks like we won't.  */
7574  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7575       || (flag_pic == 1 && abi == ABI_V4)
7576       || (flag_pic && abi == ABI_DARWIN))
7577      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7578    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7579  else
7580    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7581
7582  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7583  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7584
7585  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7586  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7587				 - info_ptr->first_altivec_reg_save);
7588
7589  /* Does this function call anything?  */
7590  info_ptr->calls_p = (! current_function_is_leaf
7591		       || cfun->machine->ra_needs_full_frame);
7592
7593  /* Determine if we need to save the link register.  */
7594  if (rs6000_ra_ever_killed ()
7595      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7596#ifdef TARGET_RELOCATABLE
7597      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7598#endif
7599      || (info_ptr->first_fp_reg_save != 64
7600	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7601      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7602      || (abi == ABI_V4 && current_function_calls_alloca)
7603      || (DEFAULT_ABI == ABI_DARWIN
7604	  && flag_pic
7605	  && current_function_uses_pic_offset_table)
7606      || info_ptr->calls_p)
7607    {
7608      info_ptr->lr_save_p = 1;
7609      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7610    }
7611
7612  /* Determine if we need to save the condition code registers.  */
7613  if (regs_ever_live[CR2_REGNO]
7614      || regs_ever_live[CR3_REGNO]
7615      || regs_ever_live[CR4_REGNO])
7616    {
7617      info_ptr->cr_save_p = 1;
7618      if (abi == ABI_V4)
7619	info_ptr->cr_size = reg_size;
7620    }
7621
7622  /* If the current function calls __builtin_eh_return, then we need
7623     to allocate stack space for registers that will hold data for
7624     the exception handler.  */
7625  if (current_function_calls_eh_return)
7626    {
7627      unsigned int i;
7628      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7629	continue;
7630      ehrd_size = i * UNITS_PER_WORD;
7631    }
7632  else
7633    ehrd_size = 0;
7634
7635  /* Determine various sizes.  */
7636  info_ptr->reg_size     = reg_size;
7637  info_ptr->fixed_size   = RS6000_SAVE_AREA;
7638  info_ptr->varargs_size = RS6000_VARARGS_AREA;
7639  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
7640  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
7641					 8);
7642
7643  if (TARGET_ALTIVEC_ABI)
7644    {
7645      info_ptr->vrsave_mask = compute_vrsave_mask ();
7646      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
7647    }
7648  else
7649    {
7650      info_ptr->vrsave_mask = 0;
7651      info_ptr->vrsave_size = 0;
7652    }
7653
7654  /* Calculate the offsets.  */
7655  switch (abi)
7656    {
7657    case ABI_NONE:
7658    default:
7659      abort ();
7660
7661    case ABI_AIX:
7662    case ABI_AIX_NODESC:
7663    case ABI_DARWIN:
7664      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7665      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7666
7667      if (TARGET_ALTIVEC_ABI)
7668	{
7669	  info_ptr->vrsave_save_offset
7670	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7671
7672	  /* Align stack so vector save area is on a quadword boundary.  */
7673	  if (info_ptr->altivec_size != 0)
7674	    info_ptr->altivec_padding_size
7675	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7676	  else
7677	    info_ptr->altivec_padding_size = 0;
7678
7679	  info_ptr->altivec_save_offset
7680	    = info_ptr->vrsave_save_offset
7681	    - info_ptr->altivec_padding_size
7682	    - info_ptr->altivec_size;
7683
7684	  /* Adjust for AltiVec case.  */
7685	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7686	}
7687      else
7688	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
7689      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
7690      info_ptr->lr_save_offset   = 2*reg_size;
7691      break;
7692
7693    case ABI_V4:
7694      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7695      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7696      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
7697
7698      if (TARGET_ALTIVEC_ABI)
7699	{
7700	  info_ptr->vrsave_save_offset
7701	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7702
7703	  /* Align stack so vector save area is on a quadword boundary.  */
7704	  if (info_ptr->altivec_size != 0)
7705	    info_ptr->altivec_padding_size
7706	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7707	  else
7708	    info_ptr->altivec_padding_size = 0;
7709
7710	  info_ptr->altivec_save_offset
7711	    = info_ptr->vrsave_save_offset
7712	    - info_ptr->altivec_padding_size
7713	    - info_ptr->altivec_size;
7714
7715	  /* Adjust for AltiVec case.  */
7716	  info_ptr->toc_save_offset
7717	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
7718	}
7719      else
7720	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
7721      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
7722      info_ptr->lr_save_offset   = reg_size;
7723      break;
7724    }
7725
7726  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
7727					 + info_ptr->gp_size
7728					 + info_ptr->altivec_size
7729					 + info_ptr->altivec_padding_size
7730					 + info_ptr->vrsave_size
7731					 + ehrd_size
7732					 + info_ptr->cr_size
7733					 + info_ptr->lr_size
7734					 + info_ptr->vrsave_size
7735					 + info_ptr->toc_size,
7736					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7737					 ? 16 : 8);
7738
7739  total_raw_size	 = (info_ptr->vars_size
7740			    + info_ptr->parm_size
7741			    + info_ptr->save_size
7742			    + info_ptr->varargs_size
7743			    + info_ptr->fixed_size);
7744
7745  info_ptr->total_size =
7746    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7747
7748  /* Determine if we need to allocate any stack frame:
7749
7750     For AIX we need to push the stack if a frame pointer is needed
7751     (because the stack might be dynamically adjusted), if we are
7752     debugging, if we make calls, or if the sum of fp_save, gp_save,
7753     and local variables are more than the space needed to save all
7754     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7755     + 18*8 = 288 (GPR13 reserved).
7756
7757     For V.4 we don't have the stack cushion that AIX uses, but assume
7758     that the debugger can handle stackless frames.  */
7759
7760  if (info_ptr->calls_p)
7761    info_ptr->push_p = 1;
7762
7763  else if (abi == ABI_V4)
7764    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7765
7766  else
7767    info_ptr->push_p = (frame_pointer_needed
7768			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7769			|| ((total_raw_size - info_ptr->fixed_size)
7770			    > (TARGET_32BIT ? 220 : 288)));
7771
7772  /* Zero offsets if we're not saving those registers.  */
7773  if (info_ptr->fp_size == 0)
7774    info_ptr->fp_save_offset = 0;
7775
7776  if (info_ptr->gp_size == 0)
7777    info_ptr->gp_save_offset = 0;
7778
7779  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7780    info_ptr->altivec_save_offset = 0;
7781
7782  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7783    info_ptr->vrsave_save_offset = 0;
7784
7785  if (! info_ptr->lr_save_p)
7786    info_ptr->lr_save_offset = 0;
7787
7788  if (! info_ptr->cr_save_p)
7789    info_ptr->cr_save_offset = 0;
7790
7791  if (! info_ptr->toc_save_p)
7792    info_ptr->toc_save_offset = 0;
7793
7794  return info_ptr;
7795}
7796
/* Dump the stack-frame layout record INFO to stderr for debugging.
   If INFO is null, compute the current function's layout via
   rs6000_stack_info first.  Fields holding zero (or their "nothing
   saved" sentinel: 32 for GPRs, 64 for FPRs, 4 for reg_size) are
   omitted from the dump.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  /* 32 / 64 mean "no GP / FP registers saved", respectively.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Offsets are relative to the incoming stack pointer; see the frame
     diagrams above rs6000_stack_info.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
7928
7929rtx
7930rs6000_return_addr (count, frame)
7931     int count;
7932     rtx frame;
7933{
7934  /* Currently we don't optimize very well between prolog and body
7935     code and for PIC code the code can be actually quite bad, so
7936     don't try to be too clever here.  */
7937  if (count != 0
7938      || flag_pic != 0
7939      || DEFAULT_ABI == ABI_AIX
7940      || DEFAULT_ABI == ABI_AIX_NODESC)
7941    {
7942      cfun->machine->ra_needs_full_frame = 1;
7943
7944      return
7945	gen_rtx_MEM
7946	  (Pmode,
7947	   memory_address
7948	   (Pmode,
7949	    plus_constant (copy_to_reg
7950			   (gen_rtx_MEM (Pmode,
7951					 memory_address (Pmode, frame))),
7952			   RETURN_ADDRESS_OFFSET)));
7953    }
7954
7955  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7956}
7957
/* Return nonzero if the link register must be treated as clobbered by
   the current function's body (and hence saved by the prologue).  */
static int
rs6000_ra_ever_killed ()
{
  rtx top;

#ifdef ASM_OUTPUT_MI_THUNK
  /* MI thunks are reported as never killing the link register.  */
  if (current_function_is_thunk)
    return 0;
#endif
  /* If the return address value was never captured via
     get_hard_reg_initial_val, or the full frame is needed anyway,
     just use the recorded liveness of LR.  */
  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
      || cfun->machine->ra_needs_full_frame)
    return regs_ever_live[LINK_REGISTER_REGNUM];

  /* Otherwise scan the topmost insn sequence for any explicit set of
     the link register.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();

  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			    top, NULL_RTX);
}
7978
/* Add a REG_MAYBE_DEAD note to INSN, marking the value INSN sets as
   one that is permitted to end up unused.  Used below on the insns
   that load the TOC register, which may turn out to be unnecessary.  */
static void
rs6000_maybe_dead (insn)
     rtx insn;
{
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
					const0_rtx,
					REG_NOTES (insn));
}
7988
7989/* Emit instructions needed to load the TOC register.
7990   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7991   a constant pool; or for SVR4 -fpic.  */
7992
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  /* DEST is the TOC/GOT pointer register being initialized.
     FROMPROLOG is nonzero when called while emitting the prologue, in
     which case we may scribble on LR and r0 directly; otherwise fresh
     pseudos are used so the scheduler/allocator can do its job.  */
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC (-fpic): a single pattern loads the GOT
     pointer.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode))
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* SVR4 large-model PIC (-fPIC): compute the TOC base from internal
     labels (prologue) or from the TOC symbol (elsewhere).  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF..: label at the PIC base; LCL..: label whose distance
	     from the TOC gives the needed offset.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Materialize the TOC label address with a lis/addi pair.  */
      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from its fixed stack slot.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
8083
int
get_TOC_alias_set ()
{
  /* Lazily allocate a distinct alias set for TOC references; every
     subsequent call returns the same set.  */
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
8092
/* This returns nonzero if the current function uses the TOC.  This is
8094   determined by the presence of (unspec ... 7), which is generated by
8095   the various load_toc_* patterns.  */
8096
8097int
8098uses_TOC ()
8099{
8100    rtx insn;
8101
8102    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8103      if (INSN_P (insn))
8104	{
8105	  rtx pat = PATTERN (insn);
8106	  int i;
8107
8108	  if (GET_CODE (pat) == PARALLEL)
8109	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8110	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8111		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8112		  return 1;
8113	}
8114    return 0;
8115}
8116
8117rtx
8118create_TOC_reference (symbol)
8119    rtx symbol;
8120{
8121  return gen_rtx_PLUS (Pmode,
8122	   gen_rtx_REG (Pmode, TOC_REGISTER),
8123	     gen_rtx_CONST (Pmode,
8124	       gen_rtx_MINUS (Pmode, symbol,
8125		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8126}
8127
8128#if TARGET_AIX
8129/* __throw will restore its own return address to be the same as the
8130   return address of the function that the throw is being made to.
8131   This is unfortunate, because we want to check the original
8132   return address to see if we need to restore the TOC.
8133   So we have to squirrel it away here.
8134   This is used only in compiling __throw and __rethrow.
8135
8136   Most of this code should be removed by CSE.  */
8137static rtx insn_after_throw;
8138
8139/* This does the saving...  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Load the caller's frame address from the back-chain word at the
     frame pointer.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Load the saved LR (the original return address) from its slot two
     words into the caller's frame...  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  /* ...and squirrel away the 4-byte instruction found at that return
     address, for rs6000_emit_eh_toc_restore to inspect later.  */
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
8158
8159/* Emit insns to _restore_ the TOC register, at runtime (specifically
8160   in _eh.o).  Only used on AIX.
8161
8162   The idea is that on AIX, function calls look like this:
8163	bl  somefunction-trampoline
8164	lwz r2,20(sp)
8165
8166   and later,
8167	somefunction-trampoline:
8168	stw r2,20(sp)
8169	 ... load function address in the count register ...
8170	bctr
8171   or like this, if the linker determines that this is not a cross-module call
8172   and so the TOC need not be restored:
8173	bl  somefunction
8174	nop
8175   or like this, if the compiler could determine that this is not a
8176   cross-module call:
8177	bl  somefunction
8178   now, the tricky bit here is that register 2 is saved and restored
8179   by the _linker_, so we can't readily generate debugging information
8180   for it.  So we need to go back up the call chain looking at the
8181   insns at return addresses to see which calls saved the TOC register
8182   and so see where it gets restored from.
8183
8184   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8185   just before the actual epilogue.
8186
8187   On the bright side, this incurs no space or time overhead unless an
8188   exception is thrown, except for the extra code in libgcc.a.
8189
8190   The parameter STACKSIZE is a register containing (at runtime)
8191   the amount to be popped off the stack in addition to the stack frame
8192   of this routine (which will be __throw or __rethrow, and so is
8193   guaranteed to have a stack frame).  */
8194
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* BOTTOM_OF_STACK starts at our caller's frame (back-chain word of
     this function's frame).  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* TOP_OF_STACK bounds the walk: the frame STACKSIZE bytes above.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The opcode of the TOC-restore insn that follows a cross-module
     call (see the comment above: "lwz r2,20(sp)" on 32-bit, or its
     64-bit counterpart).  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* rs6000_aix_emit_builtin_unwind_init must have run first to
     capture the insn at the original return address.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn after this frame's call site is the TOC-restore
     opcode, reload r2 from the TOC slot of this frame.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  /* Stop once the walk reaches the target frame.  */
  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step up one frame via the back-chain word...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ...and fetch the insn at that frame's return address.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8258#endif /* TARGET_AIX */
8259
8260/* This ties together stack memory (MEM with an alias set of
8261   rs6000_sr_alias_set) and the change to the stack pointer.  */
8262
8263static void
8264rs6000_emit_stack_tie ()
8265{
8266  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8267
8268  set_mem_alias_set (mem, rs6000_sr_alias_set);
8269  emit_insn (gen_stack_tie (mem));
8270}
8271
8272/* Emit the correct code for allocating stack space, as insns.
8273   If COPY_R12, make sure a copy of the old frame is left in r12.
8274   The generated code may use hard register 0 as a temporary.  */
8275
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit, emit a trap when the new stack pointer would
     fall below the limit.  */
  if (current_function_limit_stack)
    {
      /* Limit in a register: tmp = limit + size; trap if sp < tmp.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Limit as a symbol (32-bit SVR4 only): materialize limit+size
	 with a lis/addi pair, then trap the same way.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Preserve the old stack pointer in r12: requested by the caller,
     or needed below to store the back chain when not using the
     update-form store.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* A decrement beyond the 16-bit displacement range must go
	 through a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: decrement SP and store the back chain in one
	 insn.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update-form store: adjust SP, then store the old SP (saved
	 in r12 above) as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the SP adjustment for DWARF frame debug info.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8361
8362/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8363   knows that:
8364
8365     (mem (plus (blah) (regXX)))
8366
8367   is really:
8368
8369     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
8370
8371static void
8372altivec_frame_fixup (insn, reg, val)
8373     rtx insn, reg;
8374     HOST_WIDE_INT val;
8375{
8376  rtx real;
8377
8378  real = copy_rtx (PATTERN (insn));
8379
8380  real = replace_rtx (real, reg, GEN_INT (val));
8381
8382  RTX_FRAME_RELATED_P (insn) = 1;
8383  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8384					real,
8385					REG_NOTES (insn));
8386}
8387
8388/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8389   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8390   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
8391   deduce these equivalences by itself so it wasn't necessary to hold
8392   its hand so much.  */
8393
8394static void
8395rs6000_frame_related (insn, reg, val, reg2, rreg)
8396     rtx insn;
8397     rtx reg;
8398     HOST_WIDE_INT val;
8399     rtx reg2;
8400     rtx rreg;
8401{
8402  rtx real, temp;
8403
8404  real = copy_rtx (PATTERN (insn));
8405
8406  real = replace_rtx (real, reg,
8407		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8408							STACK_POINTER_REGNUM),
8409				    GEN_INT (val)));
8410
8411  /* We expect that 'real' is either a SET or a PARALLEL containing
8412     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
8413     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
8414
8415  if (GET_CODE (real) == SET)
8416    {
8417      rtx set = real;
8418
8419      temp = simplify_rtx (SET_SRC (set));
8420      if (temp)
8421	SET_SRC (set) = temp;
8422      temp = simplify_rtx (SET_DEST (set));
8423      if (temp)
8424	SET_DEST (set) = temp;
8425      if (GET_CODE (SET_DEST (set)) == MEM)
8426	{
8427	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8428	  if (temp)
8429	    XEXP (SET_DEST (set), 0) = temp;
8430	}
8431    }
8432  else if (GET_CODE (real) == PARALLEL)
8433    {
8434      int i;
8435      for (i = 0; i < XVECLEN (real, 0); i++)
8436	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8437	  {
8438	    rtx set = XVECEXP (real, 0, i);
8439
8440	    temp = simplify_rtx (SET_SRC (set));
8441	    if (temp)
8442	      SET_SRC (set) = temp;
8443	    temp = simplify_rtx (SET_DEST (set));
8444	    if (temp)
8445	      SET_DEST (set) = temp;
8446	    if (GET_CODE (SET_DEST (set)) == MEM)
8447	      {
8448		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8449		if (temp)
8450		  XEXP (SET_DEST (set), 0) = temp;
8451	      }
8452	    RTX_FRAME_RELATED_P (set) = 1;
8453	  }
8454    }
8455  else
8456    abort ();
8457
8458  if (reg2 != NULL_RTX)
8459    real = replace_rtx (real, reg2, rreg);
8460
8461  RTX_FRAME_RELATED_P (insn) = 1;
8462  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8463					real,
8464					REG_NOTES (insn));
8465}
8466
8467/* Returns an insn that has a vrsave set operation with the
8468   appropriate CLOBBERs.  */
8469
8470static rtx
8471generate_set_vrsave (reg, info, epiloguep)
8472     rtx reg;
8473     rs6000_stack_t *info;
8474     int epiloguep;
8475{
8476  int nclobs, i;
8477  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8478  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8479
8480  clobs[0]
8481    = gen_rtx_SET (VOIDmode,
8482		   vrsave,
8483		   gen_rtx_UNSPEC_VOLATILE (SImode,
8484					    gen_rtvec (2, reg, vrsave),
8485					    30));
8486
8487  nclobs = 1;
8488
8489  /* We need to clobber the registers in the mask so the scheduler
8490     does not move sets to VRSAVE before sets of AltiVec registers.
8491
8492     However, if the function receives nonlocal gotos, reload will set
8493     all call saved registers live.  We will end up with:
8494
8495     	(set (reg 999) (mem))
8496	(parallel [ (set (reg vrsave) (unspec blah))
8497		    (clobber (reg 999))])
8498
8499     The clobber will cause the store into reg 999 to be dead, and
8500     flow will attempt to delete an epilogue insn.  In this case, we
8501     need an unspec use/set of the register.  */
8502
8503  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8504    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8505      {
8506	if (!epiloguep || call_used_regs [i])
8507	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8508					     gen_rtx_REG (V4SImode, i));
8509	else
8510	  {
8511	    rtx reg = gen_rtx_REG (V4SImode, i);
8512
8513	    clobs[nclobs++]
8514	      = gen_rtx_SET (VOIDmode,
8515			     reg,
8516			     gen_rtx_UNSPEC (V4SImode,
8517					     gen_rtvec (1, reg), 27));
8518	  }
8519      }
8520
8521  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8522
8523  for (i = 0; i < nclobs; ++i)
8524    XVECEXP (insn, 0, i) = clobs[i];
8525
8526  return insn;
8527}
8528
8529/* Emit function prologue as insns.  */
8530
void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  /* r12 addresses the save area when offsets from the new stack
     pointer would not fit a 16-bit displacement (see the V.4 case
     below).  */
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* Store-multiple is 32-bit only; with first_gp_reg_save == 31 only
     a single register would be stored, so a plain store suffices.  */
  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frame: saves are addressed off the new SP with offset
	 sp_offset.  Large frame: address them off r12, which keeps
	 the old SP.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the byte offset of this slot.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* Let dwarf2 see the offset register as its constant value.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  (If r12 is serving as
     the frame register, CR is copied into r0 later, once r12 is
     free — see the cr_save_p block below.)  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->fp_save_offset
					  + sp_offset
					  + 8 * i));
	    mem = gen_rtx_MEM (DFmode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: call the _savefN helper, which clobbers LR
	 and stores f(first)..f31.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p, dwarfp;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	/* Also save the PIC register when it is in use, even though
	   it is call-used under V.4/-fpic and Darwin/-fpic.  */
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->gp_save_offset
					  + sp_offset
					  + reg_size * i));
	    mem = gen_rtx_MEM (reg_mode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, reg, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  reg = gen_rtx_REG (reg_mode, regno);
	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      /* r0 holds the LR value; it was copied there above.  */
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R11 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, 11),
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

    rs6000_emit_load_toc_table (TRUE);

    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		      gen_rtx_REG (Pmode, 11));
  }

  /* Darwin PIC: materialize the picbase in LR and copy it to the
     PIC offset table register; the insns may turn out to be dead.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
8873
8874/* Write function prologue.  */
8875
static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Emit these externs only once per translation unit.  */
      common_mode_defined = 1;
    }

  /* No prologue insn pattern: generate the prologue RTL here and run
     it through final () to emit the assembly directly.  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash. */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    /* Addresses need only be distinct and increasing here;
	       4 bytes per insn is the natural spacing.  */
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
8936
8937/* Emit function epilogue as insns.
8938
8939   At present, dwarf2out_frame_debug_expr doesn't understand
8940   register restores, so we don't bother setting RTX_FRAME_RELATED_P
8941   anywhere in the epilogue.  Most of the insns below would in any case
8942   need special notes to explain where r11 is in relation to the stack.  */
8943
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  /* Register the saved values are addressed from; usually the stack
     pointer, but r11 in the V.4 backchain case below.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors (or when optimizing for size) a single
     multi-field mtcrf is preferred over one mtcrf per CR field --
     presumably cheaper there; see the cr_save_p code below.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	/* V.4: leave SP alone for now; address saves at sp_offset and
	   pop the frame at the end (see below).  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the byte offset of this slot.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (Pmode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Load into r0 first; moved into LR below.  */
      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Load into r12; moved into the CR fields below.  */
      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Restore exactly the registers the prologue saved, including the
       PIC register for V.4/-fpic and Darwin/-fpic.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i),
			  mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      /* r12 holds the saved CR value, loaded above.  */
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		/* Field select bit for this CR field (field 0 is the
		   most significant bit).  */
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the extra stack adjustment demanded by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself (a sibcall's caller emits its own jump).  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
9285
9286/* Write function epilogue.  */
9287
/* Write the function epilogue to FILE.  SIZE is the frame size computed
   by the middle end and is unused here.

   Two jobs are done:
     1. If the target has no `epilogue' expander (! HAVE_epilogue), the
	epilogue RTL is generated and passed through final() right here.
     2. For the AIX ABI, an XCOFF traceback table is appended after the
	function body (see /usr/include/sys/debug.h for the layout).  */
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* Optional traceback-table fields are suppressed when optimizing for
     size or on ELF targets.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash. */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		/* Each insn is assumed to occupy 4 bytes; only relative
		   placement matters for final() here.  */
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* Two bits per FP parameter: 10 = single, 11 =
			 double precision.  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      /* Fixed-point parameters consume one word (and one
			 parm_info bit) per word of the mode.  */
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
  return;
}
9548
9549/* A C compound statement that outputs the assembler code for a thunk
9550   function, used to implement C++ virtual function calls with
9551   multiple inheritance.  The thunk acts as a wrapper around a virtual
9552   function, adjusting the implicit object parameter before handing
9553   control off to the real function.
9554
9555   First, emit code to add the integer DELTA to the location that
9556   contains the incoming first argument.  Assume that this argument
9557   contains a pointer, and is the one used to pass the `this' pointer
9558   in C++.  This is the incoming argument *before* the function
9559   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
9560   values of all other incoming arguments.
9561
9562   After the addition, emit code to jump to FUNCTION, which is a
9563   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
9564   not touch the return address.  Hence returning from FUNCTION will
9565   return to whoever called the current `thunk'.
9566
9567   The effect must be as if FUNCTION had been called directly with the
9568   adjusted first argument.  This macro is responsible for emitting
9569   all of the code for a thunk function; output_function_prologue()
9570   and output_function_epilogue() are not invoked.
9571
9572   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
9573   been extracted from it.)  It might possibly be useful on some
9574   targets, but probably not.
9575
9576   If you do not define this macro, the target-independent code in the
9577   C++ frontend will generate a less efficient heavyweight thunk that
9578   calls FUNCTION instead of jumping to it.  The generic approach does
9579   not support varargs.  */
9580
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* The `this' pointer arrives in r4 when the return value is passed by
     hidden reference (aggregate), otherwise in r3.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  /* Counter for generating unique `Lthunk' TOC labels across thunks.  */
  static int labelno = 0;

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  AIX text symbols are
     dot-prefixed; V.4/eabi names are bare.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  if (TARGET_ELF)
	    function_section (current_function_decl);
	  else
	    text_section ();
	  /* With a minimal TOC, first reload the TOC base into r12.  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  /* Load the address of the function descriptor into r12, then
	     pick up its three words: entry point (-> r0), new TOC
	     (-> r2) and static chain (-> r11), and branch via CTR.
	     Offsets are 0/4/8 for 32-bit, 0/8/16 for 64-bit.  */
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9739
9740
9741/* A quick summary of the various types of 'constant-pool tables'
9742   under PowerPC:
9743
9744   Target	Flags		Name		One table per
9745   AIX		(none)		AIX TOC		object file
9746   AIX		-mfull-toc	AIX TOC		object file
9747   AIX		-mminimal-toc	AIX minimal TOC	translation unit
9748   SVR4/EABI	(none)		SVR4 SDATA	object file
9749   SVR4/EABI	-fpic		SVR4 pic	object file
9750   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
9751   SVR4/EABI	-mrelocatable	EABI TOC	function
9752   SVR4/EABI	-maix		AIX TOC		object file
9753   SVR4/EABI	-maix -mminimal-toc
9754				AIX minimal TOC	translation unit
9755
9756   Name			Reg.	Set by	entries	      contains:
9757					made by	 addrs?	fp?	sum?
9758
9759   AIX TOC		2	crt0	as	 Y	option	option
9760   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
9761   SVR4 SDATA		13	crt0	gcc	 N	Y	N
9762   SVR4 pic		30	prolog	ld	 Y	not yet	N
9763   SVR4 PIC		30	prolog	gcc	 Y	option	option
9764   EABI TOC		30	prolog	gcc	 Y	option	option
9765
9766*/
9767
9768/* Hash table stuff for keeping track of TOC entries.  */
9769
9770struct toc_hash_struct
9771{
9772  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9773     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
9774  rtx key;
9775  enum machine_mode key_mode;
9776  int labelno;
9777};
9778
9779static htab_t toc_hash_table;
9780
9781/* Hash functions for the hash table.  */
9782
9783static unsigned
9784rs6000_hash_constant (k)
9785     rtx k;
9786{
9787  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9788  const char *format = GET_RTX_FORMAT (GET_CODE (k));
9789  int flen = strlen (format);
9790  int fidx;
9791
9792  if (GET_CODE (k) == LABEL_REF)
9793    return result * 1231 + X0INT (XEXP (k, 0), 3);
9794
9795  if (GET_CODE (k) == CONST_DOUBLE)
9796    fidx = 1;
9797  else if (GET_CODE (k) == CODE_LABEL)
9798    fidx = 3;
9799  else
9800    fidx = 0;
9801
9802  for (; fidx < flen; fidx++)
9803    switch (format[fidx])
9804      {
9805      case 's':
9806	{
9807	  unsigned i, len;
9808	  const char *str = XSTR (k, fidx);
9809	  len = strlen (str);
9810	  result = result * 613 + len;
9811	  for (i = 0; i < len; i++)
9812	    result = result * 613 + (unsigned) str[i];
9813	  break;
9814	}
9815      case 'u':
9816      case 'e':
9817	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9818	break;
9819      case 'i':
9820      case 'n':
9821	result = result * 613 + (unsigned) XINT (k, fidx);
9822	break;
9823      case 'w':
9824	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9825	  result = result * 613 + (unsigned) XWINT (k, fidx);
9826	else
9827	  {
9828	    size_t i;
9829	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9830	      result = result * 613 + (unsigned) (XWINT (k, fidx)
9831						  >> CHAR_BIT * i);
9832	  }
9833	break;
9834      default:
9835	abort ();
9836      }
9837  return result;
9838}
9839
9840static unsigned
9841toc_hash_function (hash_entry)
9842     const void * hash_entry;
9843{
9844  const struct toc_hash_struct *thc =
9845    (const struct toc_hash_struct *) hash_entry;
9846  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9847}
9848
9849/* Compare H1 and H2 for equivalence.  */
9850
9851static int
9852toc_hash_eq (h1, h2)
9853     const void * h1;
9854     const void * h2;
9855{
9856  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9857  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9858
9859  if (((const struct toc_hash_struct *) h1)->key_mode
9860      != ((const struct toc_hash_struct *) h2)->key_mode)
9861    return 0;
9862
9863  /* Gotcha:  One of these const_doubles will be in memory.
9864     The other may be on the constant-pool chain.
9865     So rtx_equal_p will think they are different...  */
9866  if (r1 == r2)
9867    return 1;
9868  if (GET_CODE (r1) != GET_CODE (r2)
9869      || GET_MODE (r1) != GET_MODE (r2))
9870    return 0;
9871  if (GET_CODE (r1) == CONST_DOUBLE)
9872    {
9873      int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9874      int i;
9875      for (i = 1; i < format_len; i++)
9876	if (XWINT (r1, i) != XWINT (r2, i))
9877	  return 0;
9878
9879      return 1;
9880    }
9881  else if (GET_CODE (r1) == LABEL_REF)
9882    return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9883	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9884  else
9885    return rtx_equal_p (r1, r2);
9886}
9887
9888/* Mark the hash table-entry HASH_ENTRY.  */
9889
9890static int
9891toc_hash_mark_entry (hash_slot, unused)
9892     void ** hash_slot;
9893     void * unused ATTRIBUTE_UNUSED;
9894{
9895  const struct toc_hash_struct * hash_entry =
9896    *(const struct toc_hash_struct **) hash_slot;
9897  rtx r = hash_entry->key;
9898  ggc_set_mark (hash_entry);
9899  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
9900  if (GET_CODE (r) == LABEL_REF)
9901    {
9902      ggc_set_mark (r);
9903      ggc_set_mark (XEXP (r, 0));
9904    }
9905  else
9906    ggc_mark_rtx (r);
9907  return 1;
9908}
9909
9910/* Mark all the elements of the TOC hash-table *HT.  */
9911
9912static void
9913toc_hash_mark_table (vht)
9914     void *vht;
9915{
9916  htab_t *ht = vht;
9917
9918  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9919}
9920
9921/* These are the names given by the C++ front-end to vtables, and
9922   vtable-like objects.  Ideally, this logic should not be here;
9923   instead, there should be some programmatic way of inquiring as
9924   to whether or not an object is a vtable.  */
9925
/* TRUE iff NAME starts with one of the prefixes the C++ front-end gives
   to vtables and vtable-like objects: "_vt." (old ABI) or _ZTV/_ZTT/_ZTC
   (new ABI vtable, VTT, construction vtable).  NOTE: the macro formerly
   referenced a variable literally named `name' from the caller's scope
   instead of its parameter; it now uses the (parenthesized) argument, so
   it works with any expression.  */
#define VTABLE_NAME_P(NAME)					\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0		\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0		\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0		\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
9931
9932void
9933rs6000_output_symbol_ref (file, x)
9934     FILE *file;
9935     rtx x;
9936{
9937  /* Currently C++ toc references to vtables can be emitted before it
9938     is decided whether the vtable is public or private.  If this is
9939     the case, then the linker will eventually complain that there is
9940     a reference to an unknown section.  Thus, for vtables only,
9941     we emit the TOC reference to reference the symbol and not the
9942     section.  */
9943  const char *name = XSTR (x, 0);
9944
9945  if (VTABLE_NAME_P (name))
9946    {
9947      RS6000_OUTPUT_BASENAME (file, name);
9948    }
9949  else
9950    assemble_name (file, name);
9951}
9952
9953/* Output a TOC entry.  We derive the entry name from what is being
9954   written.  */
9955
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into low and high 32-bit halves,
	 sign-extending a CONST_INT into `high'.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the pointer-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* A CONST must be (symbol/label + constant offset); split it.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  /* Derive the TOC entry's name from the symbol or label.  */
  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the sign and magnitude of the offset into the .tc name
	 (`.N' for negative, `.P' for positive).  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
10209
10210/* Output an assembler pseudo-op to write an ASCII string of N characters
10211   starting at P to FILE.
10212
10213   On the RS/6000, we have to do this using the .byte operation and
10214   write out special characters outside the quoted string.
10215   Also, the assembler is broken; very long strings are truncated,
10216   so we must artificially break them up early.  */
10217
/* Emit N characters starting at P to FILE as assembler `.byte' data.
   Printable characters are grouped into quoted strings (with `"'
   doubled to escape it); everything else is written as a decimal byte
   value.  Quoted runs are broken every 512 characters because the
   assembler truncates very long strings.  */
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;
  int chars_in_string = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Unprintable: emit the decimal byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  chars_in_string = 0;
	  continue;
	}

      /* Printable: emit inside a quoted string, opening one first if
	 we are not already in a string.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* Write two quotes to get one.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  ++chars_in_string;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      pending_close = "\"\n";
      ++chars_in_string;

      /* Break the string before the assembler's length limit.  */
      if (chars_in_string >= 512)
	{
	  fputs (pending_close, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  pending_close = NULL;
	  chars_in_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
10279
10280/* Generate a unique section name for FILENAME for a section type
10281   represented by SECTION_DESC.  Output goes into BUF.
10282
10283   SECTION_DESC can be any string, as long as it is different for each
10284   possible section type.
10285
10286   We name the section in the same manner as xlc.  The name begins with an
10287   underscore followed by the filename (after stripping any leading directory
10288   names) with the last period replaced by the string SECTION_DESC.  If
10289   FILENAME does not contain a period, SECTION_DESC is appended to the end of
10290   the name.  */
10291
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base;
  const char *last_dot = 0;
  char *out;
  int needed;

  /* Locate the start of the base name and the last period anywhere
     in FILENAME.  */
  base = filename;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	last_dot = scan;
    }

  /* Room for the leading '_', the name, SECTION_DESC and a NUL.  */
  needed = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (needed);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumerics, substituting SECTION_DESC at the period.  */
  for (scan = base; *scan; scan++)
    {
      if (scan == last_dot)
	{
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  /* No period anywhere: append SECTION_DESC instead.  */
  if (last_dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
10334
10335/* Emit profile function.  */
10336
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-site counter: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the "LP<labelno>" counter label to mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10381
10382/* Write function profiler code.  */
10383
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;	/* stack offset at which LR is saved */

  /* BUF holds the "LP<labelno>" per-function profile label.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      /* The V.4 ABI saves LR at offset 4 rather than 8.  */
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: fetch the label's address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address from its @ha/@l halves.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
10466
10467/* Adjust the cost of a scheduling dependency.  Return the new cost of
10468   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
10469
10470static int
10471rs6000_adjust_cost (insn, link, dep_insn, cost)
10472     rtx insn;
10473     rtx link;
10474     rtx dep_insn ATTRIBUTE_UNUSED;
10475     int cost;
10476{
10477  if (! recog_memoized (insn))
10478    return 0;
10479
10480  if (REG_NOTE_KIND (link) != 0)
10481    return 0;
10482
10483  if (REG_NOTE_KIND (link) == 0)
10484    {
10485      /* Data dependency; DEP_INSN writes a register that INSN reads
10486	 some cycles later.  */
10487      switch (get_attr_type (insn))
10488	{
10489	case TYPE_JMPREG:
10490          /* Tell the first scheduling pass about the latency between
10491	     a mtctr and bctr (and mtlr and br/blr).  The first
10492	     scheduling pass will not know about this latency since
10493	     the mtctr instruction, which has the latency associated
10494	     to it, will be generated by reload.  */
10495          return TARGET_POWER ? 5 : 4;
10496	case TYPE_BRANCH:
10497	  /* Leave some extra cycles between a compare and its
10498	     dependent branch, to inhibit expensive mispredicts.  */
10499	  if ((rs6000_cpu_attr == CPU_PPC750
10500               || rs6000_cpu_attr == CPU_PPC7400
10501               || rs6000_cpu_attr == CPU_PPC7450)
10502	      && recog_memoized (dep_insn)
10503	      && (INSN_CODE (dep_insn) >= 0)
10504	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10505		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10506		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10507		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10508	    return cost + 2;
10509	default:
10510	  break;
10511	}
10512      /* Fall out to return default cost.  */
10513    }
10514
10515  return cost;
10516}
10517
10518/* A C statement (sans semicolon) to update the integer scheduling
10519   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
10520   INSN earlier, increase the priority to execute INSN later.  Do not
10521   define this macro if you do not need to adjust the scheduling
10522   priorities of insns.  */
10523
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below is disabled; note it still contains a
     debugging fprintf that would need removing before re-enabling.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* Currently a no-op: the incoming priority is returned unchanged.  */
  return priority;
}
10561
10562/* Return how many instructions the machine can issue per cycle.  */
10563
10564static int
10565rs6000_issue_rate ()
10566{
10567  switch (rs6000_cpu_attr) {
10568  case CPU_RIOS1:  /* ? */
10569  case CPU_RS64A:
10570  case CPU_PPC601: /* ? */
10571  case CPU_PPC7450:
10572    return 3;
10573  case CPU_PPC603:
10574  case CPU_PPC750:
10575  case CPU_PPC7400:
10576    return 2;
10577  case CPU_RIOS2:
10578  case CPU_PPC604:
10579  case CPU_PPC604E:
10580  case CPU_PPC620:
10581  case CPU_PPC630:
10582    return 4;
10583  default:
10584    return 1;
10585  }
10586}
10587
10588
10589/* Length in units of the trampoline for entering a nested function.  */
10590
10591int
10592rs6000_trampoline_size ()
10593{
10594  int ret = 0;
10595
10596  switch (DEFAULT_ABI)
10597    {
10598    default:
10599      abort ();
10600
10601    case ABI_AIX:
10602      ret = (TARGET_32BIT) ? 12 : 24;
10603      break;
10604
10605    case ABI_DARWIN:
10606    case ABI_V4:
10607    case ABI_AIX_NODESC:
10608      ret = (TARGET_32BIT) ? 40 : 48;
10609      break;
10610    }
10611
10612  return ret;
10613}
10614
10615/* Emit RTL insns to initialize the variable parts of a trampoline.
10616   FNADDR is an RTX for the address of the function's pure code.
10617   CXT is an RTX for the static chain value for the function.  */
10618
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Width in bytes of one pointer-sized trampoline slot.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the code pointer and TOC pointer out of FNADDR's
	   descriptor, then store the static chain in slot three.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10667
10668
10669/* Table of valid machine attributes.  */
10670
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" takes no arguments and applies to function types;
     see rs6000_handle_longcall_attribute.  */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL,       0, 0, false, false, false, NULL }
};
10677
10678/* Handle a "longcall" attribute; arguments as in struct
10679   attribute_spec.handler.  */
10680
10681static tree
10682rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10683     tree *node;
10684     tree name;
10685     tree args ATTRIBUTE_UNUSED;
10686     int flags ATTRIBUTE_UNUSED;
10687     bool *no_add_attrs;
10688{
10689  if (TREE_CODE (*node) != FUNCTION_TYPE
10690      && TREE_CODE (*node) != FIELD_DECL
10691      && TREE_CODE (*node) != TYPE_DECL)
10692    {
10693      warning ("`%s' attribute only applies to functions",
10694	       IDENTIFIER_POINTER (name));
10695      *no_add_attrs = true;
10696    }
10697
10698  return NULL_TREE;
10699}
10700
10701/* Return a reference suitable for calling a function with the
10702   longcall attribute.  */
10703
10704struct rtx_def *
10705rs6000_longcall_ref (call_ref)
10706     rtx call_ref;
10707{
10708  const char *call_name;
10709  tree node;
10710
10711  if (GET_CODE (call_ref) != SYMBOL_REF)
10712    return call_ref;
10713
10714  /* System V adds '.' to the internal name, so skip them.  */
10715  call_name = XSTR (call_ref, 0);
10716  if (*call_name == '.')
10717    {
10718      while (*call_name == '.')
10719	call_name++;
10720
10721      node = get_identifier (call_name);
10722      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10723    }
10724
10725  return force_reg (Pmode, call_ref);
10726}
10727
10728
10729/* A C statement or statements to switch to the appropriate section
10730   for output of RTX in mode MODE.  You can assume that RTX is some
10731   kind of constant in RTL.  The argument MODE is redundant except in
10732   the case of a `const_int' rtx.  Select the section by calling
10733   `text_section' or one of the alternatives for other sections.
10734
10735   Do not define this macro if you put all constants in the read-only
10736   data section.  */
10737
10738#ifdef USING_ELFOS_H
10739
10740void
10741rs6000_select_rtx_section (mode, x)
10742     enum machine_mode mode;
10743     rtx x;
10744{
10745  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10746    toc_section ();
10747  else if (flag_pic
10748	   && (GET_CODE (x) == SYMBOL_REF
10749	       || GET_CODE (x) == LABEL_REF
10750	       || GET_CODE (x) == CONST))
10751    data_section ();
10752  else
10753    const_section ();
10754}
10755
10756/* A C statement or statements to switch to the appropriate
10757   section for output of DECL.  DECL is either a `VAR_DECL' node
10758   or a constant of some sort.  RELOC indicates whether forming
10759   the initial value of DECL requires link-time relocations.  */
10760
10761void
10762rs6000_select_section (decl, reloc)
10763     tree decl;
10764     int reloc;
10765{
10766  int size = int_size_in_bytes (TREE_TYPE (decl));
10767  int needs_sdata;
10768  int readonly;
10769  static void (* const sec_funcs[4]) PARAMS ((void)) = {
10770    &const_section,
10771    &sdata2_section,
10772    &data_section,
10773    &sdata_section
10774  };
10775
10776  needs_sdata = (size > 0
10777		 && size <= g_switch_value
10778		 && rs6000_sdata != SDATA_NONE
10779		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10780
10781  if (TREE_CODE (decl) == STRING_CST)
10782    readonly = ! flag_writable_strings;
10783  else if (TREE_CODE (decl) == VAR_DECL)
10784    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10785		&& TREE_READONLY (decl)
10786		&& ! TREE_SIDE_EFFECTS (decl)
10787		&& DECL_INITIAL (decl)
10788		&& DECL_INITIAL (decl) != error_mark_node
10789		&& TREE_CONSTANT (DECL_INITIAL (decl)));
10790  else if (TREE_CODE (decl) == CONSTRUCTOR)
10791    readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10792		&& ! TREE_SIDE_EFFECTS (decl)
10793		&& TREE_CONSTANT (decl));
10794  else
10795    readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);
10796
10797  if (needs_sdata && rs6000_sdata != SDATA_EABI)
10798    readonly = 0;
10799
10800  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10801}
10802
10803/* A C statement to build up a unique section name, expressed as a
10804   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10805   RELOC indicates whether the initial value of EXP requires
10806   link-time relocations.  If you do not define this macro, GCC will use
10807   the symbol name prefixed by `.' as the section name.  Note - this
10808   macro can now be called for uninitialized data items as well as
10809   initialised data and functions.  */
10810
10811void
10812rs6000_unique_section (decl, reloc)
10813     tree decl;
10814     int reloc;
10815{
10816  int len;
10817  int sec;
10818  const char *name;
10819  char *string;
10820  const char *prefix;
10821
10822  static const char *const prefixes[7][2] =
10823  {
10824    { ".rodata.", ".gnu.linkonce.r." },
10825    { ".sdata2.", ".gnu.linkonce.s2." },
10826    { ".data.",   ".gnu.linkonce.d." },
10827    { ".sdata.",  ".gnu.linkonce.s." },
10828    { ".bss.",    ".gnu.linkonce.b." },
10829    { ".sbss.",   ".gnu.linkonce.sb." },
10830    { ".text.",   ".gnu.linkonce.t." }
10831  };
10832
10833  if (TREE_CODE (decl) == FUNCTION_DECL)
10834    sec = 6;
10835  else
10836    {
10837      int readonly;
10838      int needs_sdata;
10839      int size;
10840
10841      if (TREE_CODE (decl) == STRING_CST)
10842	readonly = ! flag_writable_strings;
10843      else if (TREE_CODE (decl) == VAR_DECL)
10844	readonly = (! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc)
10845		    && TREE_READONLY (decl)
10846		    && ! TREE_SIDE_EFFECTS (decl)
10847		    && TREE_CONSTANT (DECL_INITIAL (decl)));
10848      else
10849	readonly = ! ((flag_pic || DEFAULT_ABI == ABI_AIX) && reloc);
10850
10851      size = int_size_in_bytes (TREE_TYPE (decl));
10852      needs_sdata = (size > 0
10853		     && size <= g_switch_value
10854		     && rs6000_sdata != SDATA_NONE
10855		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10856
10857      if (DECL_INITIAL (decl) == 0
10858	  || DECL_INITIAL (decl) == error_mark_node)
10859	sec = 4;
10860      else if (! readonly)
10861	sec = 2;
10862      else
10863	sec = 0;
10864
10865      if (needs_sdata)
10866	{
10867	  /* .sdata2 is only for EABI.  */
10868	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
10869	    sec = 2;
10870	  sec += 1;
10871	}
10872    }
10873
10874  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10875  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10876  len    = strlen (name) + strlen (prefix);
10877  string = alloca (len + 1);
10878
10879  sprintf (string, "%s%s", prefix, name);
10880
10881  DECL_SECTION_NAME (decl) = build_string (len, string);
10882}
10883
10884
10885/* If we are referencing a function that is static or is known to be
10886   in this file, make the SYMBOL_REF special.  We can use this to indicate
10887   that we can branch to this function without emitting a no-op after the
10888   call.  For real AIX calling sequences, we also replace the
10889   function name with the real name (1 or 2 leading .'s), rather than
10890   the function descriptor name.  This saves a lot of overriding code
10891   to read the prefixes.  */
10892
void
rs6000_encode_section_info (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Flag functions defined (or local) in this file so calls to
	 them can skip the post-call no-op.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
          && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): this branch runs only when DEFAULT_ABI is
	     ABI_AIX, so len1 is always 1 here and the '.' stored in
	     str[1] is always overwritten by the memcpy below.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables small enough for the small-data area, or explicitly
	 placed in one of the small-data sections, get an '@' prepended
	 to their SYMBOL_REF name.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
10961
10962#endif /* USING_ELFOS_H */
10963
10964
10965/* Return a REG that occurs in ADDR with coefficient 1.
10966   ADDR can be effectively incremented by incrementing REG.
10967
10968   r0 is special and we must not select it as an address
10969   register by this routine since our caller will try to
10970   increment the returned register via an "la" instruction.  */
10971
10972struct rtx_def *
10973find_addr_reg (addr)
10974     rtx addr;
10975{
10976  while (GET_CODE (addr) == PLUS)
10977    {
10978      if (GET_CODE (XEXP (addr, 0)) == REG
10979	  && REGNO (XEXP (addr, 0)) != 0)
10980	addr = XEXP (addr, 0);
10981      else if (GET_CODE (XEXP (addr, 1)) == REG
10982	       && REGNO (XEXP (addr, 1)) != 0)
10983	addr = XEXP (addr, 1);
10984      else if (CONSTANT_P (XEXP (addr, 0)))
10985	addr = XEXP (addr, 1);
10986      else if (CONSTANT_P (XEXP (addr, 1)))
10987	addr = XEXP (addr, 0);
10988      else
10989	abort ();
10990    }
10991  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10992    return addr;
10993  abort ();
10994}
10995
void
rs6000_fatal_bad_address (op)
  rtx op;
{
  /* Report an unusable address OP and stop compilation of the insn.  */
  fatal_insn ("bad address", op);
}
11002
11003/* Called to register all of our global variables with the garbage
11004   collector.  */
11005
static void
rs6000_add_gc_roots ()
{
  /* The pending comparison operands must survive garbage collection.  */
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* Create the TOC constant hash table and register it, with a mark
     routine that walks its entries.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
11020
11021#if TARGET_MACHO
11022
11023#if 0
11024/* Returns 1 if OP is either a symbol reference or a sum of a symbol
11025   reference and a constant.  */
11026
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* Because && binds tighter than ||, this parses as
	 SYMBOL_REF || ((SYMBOL_REF || LABEL_REF) && CONST_INT);
	 add explicit parentheses if this code is ever re-enabled.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
11046#endif
11047
11048#ifdef RS6000_LONG_BRANCH
11049
11050static tree stub_list = 0;
11051
11052/* ADD_COMPILER_STUB adds the compiler generated stub for handling
11053   procedure calls to the linked list.  */
11054
11055void
11056add_compiler_stub (label_name, function_name, line_number)
11057     tree label_name;
11058     tree function_name;
11059     int line_number;
11060{
11061  tree stub = build_tree_list (function_name, label_name);
11062  TREE_TYPE (stub) = build_int_2 (line_number, 0);
11063  TREE_CHAIN (stub) = stub_list;
11064  stub_list = stub;
11065}
11066
/* Accessors for the fields of a stub-list entry as built by
   add_compiler_stub: the label, the function, and the line number
   (stored in the entry's TREE_TYPE slot).  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
11070
11071/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11072   handling procedure calls from the linked list and initializes the
11073   linked list.  */
11074
void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  char *label;
  tree tmp_stub, stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	/* Emit the stub's label.  */
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means "use the name verbatim"; otherwise the
	   assembler name gets the usual '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 and branch through CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list so the next function starts fresh.  */
  stub_list = 0;
}
11119
11120/* NO_PREVIOUS_DEF checks in the link list whether the function name is
11121   already there or not.  */
11122
11123int
11124no_previous_def (function_name)
11125     tree function_name;
11126{
11127  tree stub;
11128  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11129    if (function_name == STUB_FUNCTION_NAME (stub))
11130      return 0;
11131  return 1;
11132}
11133
11134/* GET_PREV_LABEL gets the label name from the previous definition of
11135   the function.  */
11136
11137tree
11138get_prev_label (function_name)
11139     tree function_name;
11140{
11141  tree stub;
11142  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11143    if (function_name == STUB_FUNCTION_NAME (stub))
11144      return STUB_LABEL_NAME (stub);
11145  return 0;
11146}
11147
11148/* INSN is either a function call or a millicode call.  It may have an
11149   unconditional jump in its delay slot.
11150
11151   CALL_DEST is the routine we are calling.  */
11152
11153char *
11154output_call (insn, call_dest, operand_number)
11155     rtx insn;
11156     rtx call_dest;
11157     int operand_number;
11158{
11159  static char buf[256];
11160  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11161    {
11162      tree labelname;
11163      tree funname = get_identifier (XSTR (call_dest, 0));
11164
11165      if (no_previous_def (funname))
11166	{
11167	  int line_number;
11168	  rtx label_rtx = gen_label_rtx ();
11169	  char *label_buf, temp_buf[256];
11170	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11171				       CODE_LABEL_NUMBER (label_rtx));
11172	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11173	  labelname = get_identifier (label_buf);
11174	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11175	  if (insn)
11176	    line_number = NOTE_LINE_NUMBER (insn);
11177	  add_compiler_stub (labelname, funname, line_number);
11178	}
11179      else
11180	labelname = get_prev_label (funname);
11181
11182      sprintf (buf, "jbsr %%z%d,%.246s",
11183	       operand_number, IDENTIFIER_POINTER (labelname));
11184      return buf;
11185    }
11186  else
11187    {
11188      sprintf (buf, "bl %%z%d", operand_number);
11189      return buf;
11190    }
11191}
11192
11193#endif /* RS6000_LONG_BRANCH */
11194
/* Write into BUF a local label of the form "LN$symbol" for SYMBOL,
   preserving or adding surrounding quotes when SYMBOL is quoted or
   needs quoting.  LENGTH is not used by this macro; it is kept for
   symmetry with the companion GEN_*_NAME_FOR_SYMBOL macros.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
11212
11213
11214/* Generate PIC and indirect symbol stubs.  */
11215
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* makes each stub's local label unique */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* Derive the names used inside the stub from the symbol name.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: compute the lazy pointer's address pc-relatively,
	 load the target through it, and jump via CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially bound to the dyld helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
11272
11273/* Legitimize PIC addresses.  If the address is already
11274   position-independent, we return ORIG.  Newly generated
11275   position-independent addresses go into a reg.  This is REG if non
11276   zero, otherwise we allocate register(s) as necessary.  */
11277
/* Nonzero if the CONST_INT X fits in a signed 16-bit field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11279
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch pseudo lazily; once reload has started we may
     no longer create new pseudo registers.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already
	 position-independent -- nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize the two operands of the PLUS independently, then
	 recombine them below.  Anything other than a PLUS inside a
	 CONST is unexpected here.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* A small offset (fits the signed 16-bit displacement range
	     checked by SMALL_INT) can be folded straight back into the
	     address.  */
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload we cannot force the large offset
		 into a fresh register, so push the whole constant to
		 the constant pool and legitimize that memory ref.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
11327
11328/* This is just a placeholder to make linking work without having to
11329   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
11330   ever needed for Darwin (not too likely!) this would have to get a
11331   real definition.  */
11332
void
toc_section ()
{
  /* Intentionally empty: on Darwin this is only a link-time
     placeholder (see the comment above); a real implementation would
     be needed only if -mcall-aix were ever supported here.  */
}
11337
11338#endif /* TARGET_MACHO */
11339
11340#if TARGET_ELF
11341static unsigned int
11342rs6000_elf_section_type_flags (decl, name, reloc)
11343     tree decl;
11344     const char *name;
11345     int reloc;
11346{
11347  unsigned int flags = default_section_type_flags (decl, name, reloc);
11348
11349  if (TARGET_RELOCATABLE)
11350    flags |= SECTION_WRITE;
11351
11352  return flags;
11353}
11354
11355/* Record an element in the table of global constructors.  SYMBOL is
11356   a SYMBOL_REF of the function to be called; PRIORITY is a number
11357   between 0 and MAX_INIT_PRIORITY.
11358
11359   This differs from default_named_section_asm_out_constructor in
11360   that we have special handling for -mrelocatable.  */
11361
11362static void
11363rs6000_elf_asm_out_constructor (symbol, priority)
11364     rtx symbol;
11365     int priority;
11366{
11367  const char *section = ".ctors";
11368  char buf[16];
11369
11370  if (priority != DEFAULT_INIT_PRIORITY)
11371    {
11372      sprintf (buf, ".ctors.%.5u",
11373               /* Invert the numbering so the linker puts us in the proper
11374                  order; constructors are run from right to left, and the
11375                  linker sorts in increasing order.  */
11376               MAX_INIT_PRIORITY - priority);
11377      section = buf;
11378    }
11379
11380  named_section_flags (section, SECTION_WRITE);
11381  assemble_align (POINTER_SIZE);
11382
11383  if (TARGET_RELOCATABLE)
11384    {
11385      fputs ("\t.long (", asm_out_file);
11386      output_addr_const (asm_out_file, symbol);
11387      fputs (")@fixup\n", asm_out_file);
11388    }
11389  else
11390    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11391}
11392
11393static void
11394rs6000_elf_asm_out_destructor (symbol, priority)
11395     rtx symbol;
11396     int priority;
11397{
11398  const char *section = ".dtors";
11399  char buf[16];
11400
11401  if (priority != DEFAULT_INIT_PRIORITY)
11402    {
11403      sprintf (buf, ".dtors.%.5u",
11404               /* Invert the numbering so the linker puts us in the proper
11405                  order; constructors are run from right to left, and the
11406                  linker sorts in increasing order.  */
11407               MAX_INIT_PRIORITY - priority);
11408      section = buf;
11409    }
11410
11411  named_section_flags (section, SECTION_WRITE);
11412  assemble_align (POINTER_SIZE);
11413
11414  if (TARGET_RELOCATABLE)
11415    {
11416      fputs ("\t.long (", asm_out_file);
11417      output_addr_const (asm_out_file, symbol);
11418      fputs (")@fixup\n", asm_out_file);
11419    }
11420  else
11421    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11422}
11423#endif
11424
11425#ifdef OBJECT_FORMAT_COFF
11426static void
11427xcoff_asm_named_section (name, flags)
11428     const char *name;
11429     unsigned int flags ATTRIBUTE_UNUSED;
11430{
11431  fprintf (asm_out_file, "\t.csect %s\n", name);
11432}
11433#endif
11434