rs6000.c revision 90075
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING.  If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
/* Some subtargets do not define TARGET_NO_PROTOTYPE; default it off.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Local min/max helpers.  Classic macro caveat: each argument is
   evaluated twice, so avoid side effects in the operands.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
/* Target cpu type */

enum processor_type rs6000_cpu;
/* Table driving -mcpu=/-mtune= handling; the string slots are filled
   in from the command line before rs6000_override_options runs.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;	/* raw -mlong-double-N argument */
int rs6000_long_double_type_size;		/* 64 or 128; set in rs6000_override_options */

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;	/* non-zero when the pending compare is floating point */

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];	/* assembler label naming the TOC base */

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;
120
/* Forward declarations for the static helpers defined later in this
   file.  PARAMS wraps the argument lists for pre-ISO compilers.  */
static void rs6000_add_gc_roots PARAMS ((void));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int toc_hash_mark_entry PARAMS ((void **, void *));
static void toc_hash_mark_table PARAMS ((void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static void rs6000_free_machine_status PARAMS ((struct function *));
static void rs6000_init_machine_status PARAMS ((struct function *));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Attribute table; the initialized definition appears later in the file.  */
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
  HOST_WIDE_INT, HOST_WIDE_INT));
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
#endif
#ifdef OBJECT_FORMAT_COFF
static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
#endif
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

/* Builtin and AltiVec support helpers.  */
static void rs6000_init_builtins PARAMS ((void));
static void altivec_init_builtins PARAMS ((void));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static rtx altivec_expand_builtin PARAMS ((tree, rtx));
static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
static void rs6000_parse_abi_options PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
int vrsave_operation PARAMS ((rtx, enum machine_mode));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
175
/* Default register names, indexed by hard register number.  */
char rs6000_reg_names[][8] =
{
      /* General-purpose registers r0-r31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* Floating-point registers f0-f31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
      /* Condition-register fields cr0-cr7, then the XER.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
197
#ifdef TARGET_REGNAMES
/* Alternate (symbolic) register names; copied over rs6000_reg_names in
   rs6000_override_options when -mregnames is given.  Must parallel the
   layout of rs6000_reg_names exactly.  */
static const char alt_reg_names[][8] =
{
   /* General-purpose registers.  */
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   /* Floating-point registers.  */
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
   /* Special registers, condition-register fields, XER.  */
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9",  "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
   "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
   "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
   "%vrsave"
};
#endif
220
/* Subtargets without strict-alignment support define no such mask.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#ifdef OBJECT_FORMAT_COFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#if TARGET_ELF
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS  rs6000_elf_section_type_flags
#endif

/* Scheduler hooks.  */
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The one and only target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
285
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk --with-cpu=, -mcpu= and -mtune= in that order, so later,
     more specific switches win.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* The inner loop ran off the end: no table entry matched.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  /* Only warn when the user asked for it explicitly.  */
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* AIX code is always position independent; -fpic/-fPIC are no-ops.  */
  if (flag_pic && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-f%s ignored (all code is position independent)",
	       (flag_pic > 1) ? "PIC" : "pic");
      flag_pic = 0;
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      /* Only exactly "64" or "128" are accepted.  */
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
  free_machine_status = rs6000_free_machine_status;
}
580
581/* Handle -mabi= options.  */
582static void
583rs6000_parse_abi_options ()
584{
585  if (rs6000_abi_string == 0)
586    return;
587  else if (! strcmp (rs6000_abi_string, "altivec"))
588    rs6000_altivec_abi = 1;
589  else
590    error ("unknown ABI specified: '%s'", rs6000_abi_string);
591}
592
/* Optimization-level dependent option adjustments.  The rs6000 port
   currently makes none, so this required hook is intentionally empty.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
599
/* Do anything needed at the start of the asm file.  Under -fverbose-asm
   this emits an assembler comment echoing the cpu/tune and small-data
   options in effect.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  /* START carries the comment prefix for the first chunk printed and is
     reset to "" once anything has been emitted.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Echo each --with-cpu=/-mcpu=/-mtune= selection that was given.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      /* Report the small-data model and -G threshold on ELF targets.  */
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* START is "" exactly when something was printed above; only then
	 terminate the comment line.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
647
648
/* Create a CONST_DOUBLE from a string.  STRING is parsed as a
   floating-point literal in machine mode MODE and the resulting value
   is wrapped in an immediate rtx.  */

struct rtx_def *
rs6000_float_const (string, mode)
     const char *string;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE value;
  value = REAL_VALUE_ATOF (string, mode);
  return immed_real_const_1 (value, mode);
}
660
/* Return non-zero if this function is known to have a null epilogue.  */

int
direct_return ()
{
  /* The stack layout is only final after reload; before that we cannot
     promise anything about the epilogue.  */
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* A null epilogue requires: no GPR (first save slot is 32), FPR
	 (64) or AltiVec registers saved, no link-register or
	 condition-register save, no live VRSAVE bits, and no stack
	 frame to pop.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
682
683/* Returns 1 always.  */
684
685int
686any_operand (op, mode)
687     rtx op ATTRIBUTE_UNUSED;
688     enum machine_mode mode ATTRIBUTE_UNUSED;
689{
690  return 1;
691}
692
/* Returns 1 if op is the count register.  */
int
count_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != REG)
    return 0;

  if (REGNO (op) == COUNT_REGISTER_REGNUM)
    return 1;

  /* Also accept a pseudo register that may end up allocated to CTR.
     NOTE(review): the strict `>' here skips REGNO == FIRST_PSEUDO_REGISTER,
     whereas the cc_reg_operand predicates below use `>=' -- confirm the
     asymmetry is intentional.  */
  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
    return 1;

  return 0;
}
710
711int
712xer_operand (op, mode)
713     rtx op;
714     enum machine_mode mode ATTRIBUTE_UNUSED;
715{
716  if (GET_CODE (op) != REG)
717    return 0;
718
719  if (XER_REGNO_P (REGNO (op)))
720    return 1;
721
722  return 0;
723}
724
725/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
726   by such constants completes more quickly.  */
727
728int
729s8bit_cint_operand (op, mode)
730     rtx op;
731     enum machine_mode mode ATTRIBUTE_UNUSED;
732{
733  return ( GET_CODE (op) == CONST_INT
734	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
735}
736
737/* Return 1 if OP is a constant that can fit in a D field.  */
738
739int
740short_cint_operand (op, mode)
741     rtx op;
742     enum machine_mode mode ATTRIBUTE_UNUSED;
743{
744  return (GET_CODE (op) == CONST_INT
745	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
746}
747
748/* Similar for an unsigned D field.  */
749
750int
751u_short_cint_operand (op, mode)
752     rtx op;
753     enum machine_mode mode ATTRIBUTE_UNUSED;
754{
755  return (GET_CODE (op) == CONST_INT
756	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
757}
758
759/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
760
761int
762non_short_cint_operand (op, mode)
763     rtx op;
764     enum machine_mode mode ATTRIBUTE_UNUSED;
765{
766  return (GET_CODE (op) == CONST_INT
767	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
768}
769
770/* Returns 1 if OP is a CONST_INT that is a positive value
771   and an exact power of 2.  */
772
773int
774exact_log2_cint_operand (op, mode)
775     rtx op;
776     enum machine_mode mode ATTRIBUTE_UNUSED;
777{
778  return (GET_CODE (op) == CONST_INT
779	  && INTVAL (op) > 0
780	  && exact_log2 (INTVAL (op)) >= 0);
781}
782
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept whatever register_operand allows (pseudos, SUBREGs of regs);
     for hard registers, accept anything below MQ (the GPRs and FPRs)
     or anything from the argument pointer upward except the XER --
     which excludes the MQ/LR/CTR range in between.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
797
798/* Returns 1 if OP is either a pseudo-register or a register denoting a
799   CR field.  */
800
801int
802cc_reg_operand (op, mode)
803     rtx op;
804     enum machine_mode mode;
805{
806  return (register_operand (op, mode)
807	  && (GET_CODE (op) != REG
808	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
809	      || CR_REGNO_P (REGNO (op))));
810}
811
812/* Returns 1 if OP is either a pseudo-register or a register denoting a
813   CR field that isn't CR0.  */
814
815int
816cc_reg_not_cr0_operand (op, mode)
817     rtx op;
818     enum machine_mode mode;
819{
820  return (register_operand (op, mode)
821	  && (GET_CODE (op) != REG
822	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
823	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
824}
825
826/* Returns 1 if OP is either a constant integer valid for a D-field or
827   a non-special register.  If a register, it must be in the proper
828   mode unless MODE is VOIDmode.  */
829
830int
831reg_or_short_operand (op, mode)
832      rtx op;
833      enum machine_mode mode;
834{
835  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
836}
837
838/* Similar, except check if the negation of the constant would be
839   valid for a D-field.  */
840
841int
842reg_or_neg_short_operand (op, mode)
843      rtx op;
844      enum machine_mode mode;
845{
846  if (GET_CODE (op) == CONST_INT)
847    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
848
849  return gpc_reg_operand (op, mode);
850}
851
852/* Return 1 if the operand is either a register or an integer whose
853   high-order 16 bits are zero.  */
854
855int
856reg_or_u_short_operand (op, mode)
857     rtx op;
858     enum machine_mode mode;
859{
860  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
861}
862
863/* Return 1 is the operand is either a non-special register or ANY
864   constant integer.  */
865
866int
867reg_or_cint_operand (op, mode)
868    rtx op;
869    enum machine_mode mode;
870{
871  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
872}
873
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a 64-bit host, bias by 2^31 and require the result to
		 fit in 32 bits; on a 32-bit host every CONST_INT already
		 does, so no range check is compiled in.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
890
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* Upper bound 0x7fff8000: the largest value reachable by an
		 addis of 0x7fff plus a signed 16-bit addi.  */
	      && INTVAL (op) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a 64-bit host, additionally require the value to lie
		 in the window [-0x80008000, 0x7fff8000).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
908
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction, i.e. whose
   NEGATION satisfies the reg_or_add_cint64_operand range.  */

int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* NOTE(review): negating the most-negative HOST_WIDE_INT
		 overflows; presumably such constants never reach here --
		 confirm.  */
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
926
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT in a mode wider than the host word is only
	     expected for multi-word modes; narrower modes should never
	     take this path.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative value would sign-extend set bits into the high
	     word, so it cannot be a 32-bit unsigned constant.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept the constant when no bits above the low 32 are set
	 within the mode's mask.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only make sense here for DImode values on
	 a host whose word is narrower than the mode.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Unsigned 32-bit means the high word must be entirely zero.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
960
961/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
962
963int
964got_operand (op, mode)
965     rtx op;
966     enum machine_mode mode ATTRIBUTE_UNUSED;
967{
968  return (GET_CODE (op) == SYMBOL_REF
969	  || GET_CODE (op) == CONST
970	  || GET_CODE (op) == LABEL_REF);
971}
972
/* Return 1 if the operand is a simple reference that can be loaded via
   the GOT (labels involving addition aren't allowed).  */
975
976int
977got_no_const_operand (op, mode)
978     rtx op;
979     enum machine_mode mode ATTRIBUTE_UNUSED;
980{
981  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
982}
983
984/* Return the number of instructions it takes to form a constant in an
985   integer register.  */
986
static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split the value into its two 32-bit halves.  */
      HOST_WIDE_INT low  = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;  /* sign extend */

      /* If the high half is merely the sign extension of the low
	 half, two insns build the whole constant.  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Low half zero: build the high half, plus one insn to
	 shift it into place.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      /* General case: build both halves and one insn to combine.  */
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* Otherwise a 32-bit value not fitting one immediate takes two
     insns (high part + low part).  */
  else
    return 2;
}
1025
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A true 64-bit value (not a sign-extended 32-bit one) that is
	 a PowerPC64 mask constant can be built in 2 insns.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* A single-precision constant is loaded via its 32-bit
	 target image.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT)l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integer CONST_DOUBLEs carry their words directly; float ones
	 are first converted to the target's double image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      /* 32-bit target: the two words are built independently.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit target: if the high word is just the sign
	     extension of the low word, only the low word counts.  */
	  if (high == 0 && (low & 0x80000000) == 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && (low & 0x80000000) != 0)
	    return num_insns_constant_wide (low);

	  /* Mask constants take 2 insns.  */
	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1100
1101/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1102   register with one instruction per word.  We only do this if we can
1103   safely read CONST_DOUBLE_{LOW,HIGH}.  */
1104
int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Only CONST_DOUBLEs of a float mode (or DImode) are candidates.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if (TARGET_SOFT_FLOAT && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      /* Easy iff each 32-bit half of the target image loads in a
	 single insn.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  /* DImode: easy when the low word is zero on PowerPC64, or when the
     whole value can be built in at most 2 insns.  */
  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1163
1164/* Return 1 if the operand is 0.0.  */
1165int
1166zero_fp_constant (op, mode)
1167     rtx op;
1168     enum machine_mode mode;
1169{
1170  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1171}
1172
1173/* Return 1 if the operand is in volatile memory.  Note that during
1174   the RTL generation phase, memory_operand does not return TRUE for
1175   volatile memory references.  So this function allows us to
   recognize volatile references where it's safe.  */
1177
1178int
1179volatile_mem_operand (op, mode)
1180     rtx op;
1181     enum machine_mode mode;
1182{
1183  if (GET_CODE (op) != MEM)
1184    return 0;
1185
1186  if (!MEM_VOLATILE_P (op))
1187    return 0;
1188
1189  if (mode != GET_MODE (op))
1190    return 0;
1191
1192  if (reload_completed)
1193    return memory_operand (op, mode);
1194
1195  if (reload_in_progress)
1196    return strict_memory_address_p (mode, XEXP (op, 0));
1197
1198  return memory_address_p (mode, XEXP (op, 0));
1199}
1200
1201/* Return 1 if the operand is an offsettable memory operand.  */
1202
1203int
1204offsettable_mem_operand (op, mode)
1205     rtx op;
1206     enum machine_mode mode;
1207{
1208  return ((GET_CODE (op) == MEM)
1209	  && offsettable_address_p (reload_completed || reload_in_progress,
1210				    mode, XEXP (op, 0)));
1211}
1212
1213/* Return 1 if the operand is either an easy FP constant (see above) or
1214   memory.  */
1215
1216int
1217mem_or_easy_const_operand (op, mode)
1218     rtx op;
1219     enum machine_mode mode;
1220{
1221  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1222}
1223
1224/* Return 1 if the operand is either a non-special register or an item
1225   that can be used as the operand of a `mode' add insn.  */
1226
1227int
1228add_operand (op, mode)
1229    rtx op;
1230    enum machine_mode mode;
1231{
1232  if (GET_CODE (op) == CONST_INT)
1233    return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1234	    || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1235
1236  return gpc_reg_operand (op, mode);
1237}
1238
1239/* Return 1 if OP is a constant but not a valid add_operand.  */
1240
1241int
1242non_add_cint_operand (op, mode)
1243     rtx op;
1244     enum machine_mode mode ATTRIBUTE_UNUSED;
1245{
1246  return (GET_CODE (op) == CONST_INT
1247	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1248	  && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1249}
1250
1251/* Return 1 if the operand is a non-special register or a constant that
1252   can be used as the operand of an OR or XOR insn on the RS/6000.  */
1253
int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host, a negative value in a wider mode implies
	 set bits above the low word, which the immediate forms
	 cannot produce.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only appear when the mode exceeds the
	 host word size.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* The high word must be entirely zero.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* The value must fit entirely within either the low or the high
     16 bits of the low word.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1289
/* Return 1 if the operand is a constant that is not a logical operand
   (as above), but could be split into one.  */
1292
1293int
1294non_logical_cint_operand (op, mode)
1295     rtx op;
1296     enum machine_mode mode;
1297{
1298  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1299	  && ! logical_operand (op, mode)
1300	  && reg_or_logical_cint_operand (op, mode));
1301}
1302
1303/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1304   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
1305   Reject all ones and all zeros, since these should have been optimized
1306   away and confuse the making of MB and ME.  */
1307
int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  /* c & -c isolates the lowest set bit of c.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  /* -lsb has all bits at and above lsb set, so this clears
     everything below the first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1344
1345/* Return 1 if the operand is a constant that is a PowerPC64 mask.
1346   It is if there are no more than one 1->0 or 0->1 transitions.
1347   Reject all ones and all zeros, since these should have been optimized
1348   away and confuse the making of MB and ME.  */
1349
int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      /* c & -c isolates the lowest set bit of c.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      HOST_WIDE_INT low, high, lsb;

      /* HIGH is only used on hosts whose HOST_WIDE_INT cannot hold
	 the whole 64-bit value.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  /* As above: inverting does not change the transition count.  */
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* Low word all zero: the single transition must lie in the
	     high word.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition in the low word: everything above it, including
	 the entire high word, must be ones.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1404
1405/* Return 1 if the operand is either a non-special register or a constant
1406   that can be used as the operand of a PowerPC64 logical AND insn.  */
1407
1408int
1409and64_operand (op, mode)
1410    rtx op;
1411    enum machine_mode mode;
1412{
1413  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1414    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1415
1416  return (logical_operand (op, mode) || mask64_operand (op, mode));
1417}
1418
1419/* Return 1 if the operand is either a non-special register or a
1420   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1421
1422int
1423and_operand (op, mode)
1424    rtx op;
1425    enum machine_mode mode;
1426{
1427  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1428    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1429
1430  return (logical_operand (op, mode) || mask_operand (op, mode));
1431}
1432
1433/* Return 1 if the operand is a general register or memory operand.  */
1434
1435int
1436reg_or_mem_operand (op, mode)
1437     rtx op;
1438     enum machine_mode mode;
1439{
1440  return (gpc_reg_operand (op, mode)
1441	  || memory_operand (op, mode)
1442	  || volatile_mem_operand (op, mode));
1443}
1444
1445/* Return 1 if the operand is a general register or memory operand without
1446   pre_inc or pre_dec which produces invalid form of PowerPC lwa
1447   instruction.  */
1448
1449int
1450lwa_operand (op, mode)
1451     rtx op;
1452     enum machine_mode mode;
1453{
1454  rtx inner = op;
1455
1456  if (reload_completed && GET_CODE (inner) == SUBREG)
1457    inner = SUBREG_REG (inner);
1458
1459  return gpc_reg_operand (inner, mode)
1460    || (memory_operand (inner, mode)
1461	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
1462	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
1463	&& (GET_CODE (XEXP (inner, 0)) != PLUS
1464	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1465	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1466}
1467
1468/* Return 1 if the operand, used inside a MEM, is a valid first argument
1469   to CALL.  This is a SYMBOL_REF or a pseudo-register, which will be
1470   forced to lr.  */
1471
1472int
1473call_operand (op, mode)
1474     rtx op;
1475     enum machine_mode mode;
1476{
1477  if (mode != VOIDmode && GET_MODE (op) != mode)
1478    return 0;
1479
1480  return (GET_CODE (op) == SYMBOL_REF
1481	  || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1482}
1483
1484/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1485   this file and the function is not weakly defined.  */
1486
1487int
1488current_file_function_operand (op, mode)
1489     rtx op;
1490     enum machine_mode mode ATTRIBUTE_UNUSED;
1491{
1492  return (GET_CODE (op) == SYMBOL_REF
1493	  && (SYMBOL_REF_FLAG (op)
1494	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
1495	          && ! DECL_WEAK (current_function_decl))));
1496}
1497
1498/* Return 1 if this operand is a valid input for a move insn.  */
1499
int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Anything else is not a valid move source.  */
  return 0;
}
1554
1555/* Return 1 for an operand in small memory on V.4/eabi.  */
1556
int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small data area with -msdata=none or -msdata=data.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  /* Small data only exists under the V.4/eabi ABI.  */
  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) can match.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small data symbols carry a leading '@' in their name -- presumably
     set when the symbol is placed in the small data section; verify
     against the ELF section-selection code.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1603
1604static int
1605constant_pool_expr_1 (op, have_sym, have_toc)
1606    rtx op;
1607    int *have_sym;
1608    int *have_toc;
1609{
1610  switch (GET_CODE(op))
1611    {
1612    case SYMBOL_REF:
1613      if (CONSTANT_POOL_ADDRESS_P (op))
1614	{
1615	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1616	    {
1617	      *have_sym = 1;
1618	      return 1;
1619	    }
1620	  else
1621	    return 0;
1622	}
1623      else if (! strcmp (XSTR (op, 0), toc_label_name))
1624	{
1625	  *have_toc = 1;
1626	  return 1;
1627	}
1628      else
1629	return 0;
1630    case PLUS:
1631    case MINUS:
1632      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1633	constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1634    case CONST:
1635      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1636    case CONST_INT:
1637      return 1;
1638    default:
1639      return 0;
1640    }
1641}
1642
1643int
1644constant_pool_expr_p (op)
1645    rtx op;
1646{
1647  int have_sym = 0;
1648  int have_toc = 0;
1649  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1650}
1651
1652int
1653toc_relative_expr_p (op)
1654    rtx op;
1655{
1656    int have_sym = 0;
1657    int have_toc = 0;
1658    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1659}
1660
1661/* Try machine-dependent ways of modifying an illegitimate address
1662   to be legitimate.  If we find one, return the new, valid address.
1663   This is used from only one place: `memory_address' in explow.c.
1664
1665   OLDX is the address as it was before break_out_memory_refs was
1666   called.  In some cases it is useful to look at this to decide what
1667   needs to be done.
1668
1669   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1670
1671   It is always safe for this function to do nothing.  It exists to
1672   recognize opportunities to optimize the output.
1673
1674   On RS/6000, first check for the sum of a register with a constant
1675   integer that is out of range.  If so, generate code to add the
1676   constant with the low-order 16 bits masked to the register and force
1677   this result into another register (this can be done with `cau').
1678   Then generate an address of REG+(CONST&0xffff), allowing for the
1679   possibility of bit 16 being a one.
1680
1681   Then check for the sum of a register and something not constant, try to
1682   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: add the high part to the register
     and keep only the sign-extended low 16 bits in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If bit 15 of the low part is set, carry into the high part so
	 the low part stays a sign-extended 16-bit value.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant term: force the other term into a register so
     the result is an indexed address (where the mode permits one).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without a TOC: split a symbolic constant into a HIGH part
     loaded into a register plus a LO_SUM.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise for Darwin without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Special constant pool entries reached through the TOC.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  /* No transformation applies; let the caller fall back.  */
  else
    return NULL_RTX;
}
1761
1762/* The convention appears to be to define this wherever it is used.
1763   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1764   is now used here.  */
1765#ifndef REG_MODE_OK_FOR_BASE_P
1766#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1767#endif
1768
1769/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
1770   replace the input X, or the original X if no replacement is called for.
1771   The output parameter *WIN is 1 if the calling macro should goto WIN,
1772   0 if it should not.
1773
1774   For RS/6000, we wish to handle large displacements off a base
1775   register by splitting the addend across an addiu/addis and the mem insn.
1776   This cuts number of extra insns needed from 3 to 1.
1777
1778   On Darwin, we use this to generate code for floating point constants.
1779   A movsf_low is generated so we wind up with 2 instructions rather than 3.
1780   The Darwin code is inside #if TARGET_MACHO because only then is
1781   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner reg+high sum into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split the addend into a high part that gets
     reloaded into a base register and a low part left in the mem.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* LOW is VAL's sign-extended low 16 bits; HIGH is the remainder,
	 sign-extended to 32 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Special constant pool entries are rewritten as TOC references.  */
  if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* Nothing matched; tell the caller to handle X the ordinary way.  */
  *win = 0;
  return x;
}
1888
1889/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1890   that is a valid memory address for an instruction.
1891   The MODE argument is the machine mode for the MEM expression
1892   that wants to use this address.
1893
   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
1899   we must ensure that both words are addressable or PowerPC64 with offset
1900   word aligned.
1901
1902   For modes spanning multiple registers (DFmode in 32-bit GPRs,
1903   32-bit DImode, TImode), indexed addressing cannot be used because
1904   adjacent memory cells are accessed by adding word-sized offsets
1905   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, when the target has update forms.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* References into the small data area.  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* SYMBOL_REFs naming constant pool entries reached via the TOC.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus a constant displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg), except for modes that span multiple
     registers (see the function comment above).  */
  if (mode != TImode
      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM of a high part and its matching low displacement.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
1940
1941/* Try to output insns to set TARGET equal to the constant C if it can
1942   be done in less than N insns.  Do all computations in MODE.
1943   Returns the place where the output has been placed if it can be
1944   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */
1946
rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c0, c1;

  /* Narrow modes need no decomposition; a single move suffices.  */
  if (mode == QImode || mode == HImode || mode == SImode)
    {
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }

  /* Pick the constant apart into low (C0) and high (C1) words.  */
  if (GET_CODE (source) == CONST_INT)
    {
      c0 = INTVAL (source);
      /* Sign-extend into the high word.  */
      c1 = -(c0 < 0);
    }
  else if (GET_CODE (source) == CONST_DOUBLE)
    {
#if HOST_BITS_PER_WIDE_INT >= 64
      /* On a 64-bit host, the low word carries the whole value.  */
      c0 = CONST_DOUBLE_LOW (source);
      c1 = -(c0 < 0);
#else
      c0 = CONST_DOUBLE_LOW (source);
      c1 = CONST_DOUBLE_HIGH (source);
#endif
    }
  else
    abort ();

  return rs6000_emit_set_long_const (dest, c0, c1);
}
1983
1984/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1985   fall back to a straight forward decomposition.  We do this to avoid
1986   exponential run times encountered when looking for longer sequences
1987   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  /* Load into DEST the 64-bit constant whose low host word is C1 and
     high host word is C2, emitting whatever move/ior/shift insns are
     required.  Returns DEST.  */
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: just store each 32-bit half into the matching
	 subword of DEST.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: build the value from its four 16-bit chunks,
	 ud1 (least significant) through ud4 (most significant),
	 using the shortest sequence the chunk pattern allows.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* When host words are 64 bits wide, the high half of the value
	 lives in C1 itself; ignore the passed-in C2.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one load immediate.
	 (ud1 ^ 0x8000) - 0x8000 sign-extends the 16-bit chunk.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: lis plus an
	 optional ori for the low chunk.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000))
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Three significant chunks: load ud3:ud2 sign-extended, shift
	 left 16, then or in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: load ud4:ud3 sign-extended, shift left 32,
	     then or in ud2 (shifted) and ud1.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2072
2073/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  /* Expand a move of SOURCE into DEST in MODE, legitimizing constants
     (forcing them to memory or the TOC) and addresses as needed, then
     emitting the final SET.  */
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose value would fit a CONST_INT should
     have been represented as a CONST_INT; reject it.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Split the DImode mem-to-mem copy into two SImode moves so no
	 FP temporary is needed.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* Before reload, a store needs its source in a register.  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (! general_operand (operands[1], mode)
      || ! nonimmediate_operand (operands[0], mode)
      || GET_CODE (operands[1]) == CONSTANT_P_RTX)
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants (e.g. symbolic addresses) must come
	 from the constant pool in these narrow modes.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* fixme: aldyh -- allow vector constants when they are implemented.  */
      if (CONSTANT_P (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data.  */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-PIC: load the address through the GOT.  (Note the
	 double mode test only passes when Pmode is SImode.)  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and no PIC: materialize the address with a high/low
	 pair of insns.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant itself went into the TOC, reference
	     it TOC-relative and mark the memory as unchanging.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode moves need simple register addresses; copy any other
	 address into a register first (except during reload, which
	 must not create new pseudos).  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2369
2370/* Initialize a variable CUM of type CUMULATIVE_ARGS
2371   for a call to a function whose data type is FNTYPE.
2372   For a library call, FNTYPE is 0.
2373
2374   For incoming args we set the number of arguments in the prototype large
2375   so we never return a PARALLEL.  */
2376
2377void
2378init_cumulative_args (cum, fntype, libname, incoming)
2379     CUMULATIVE_ARGS *cum;
2380     tree fntype;
2381     rtx libname ATTRIBUTE_UNUSED;
2382     int incoming;
2383{
2384  static CUMULATIVE_ARGS zero_cumulative;
2385
2386  *cum = zero_cumulative;
2387  cum->words = 0;
2388  cum->fregno = FP_ARG_MIN_REG;
2389  cum->vregno = ALTIVEC_ARG_MIN_REG;
2390  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2391  cum->call_cookie = CALL_NORMAL;
2392  cum->sysv_gregno = GP_ARG_MIN_REG;
2393
2394  if (incoming)
2395    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2396
2397  else if (cum->prototype)
2398    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2399			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2400			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2401
2402  else
2403    cum->nargs_prototype = 0;
2404
2405  cum->orig_nargs = cum->nargs_prototype;
2406
2407  /* Check for longcall's */
2408  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2409    cum->call_cookie = CALL_LONG;
2410
2411  if (TARGET_DEBUG_ARG)
2412    {
2413      fprintf (stderr, "\ninit_cumulative_args:");
2414      if (fntype)
2415	{
2416	  tree ret_type = TREE_TYPE (fntype);
2417	  fprintf (stderr, " ret code = %s,",
2418		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2419	}
2420
2421      if (cum->call_cookie & CALL_LONG)
2422	fprintf (stderr, " longcall,");
2423
2424      fprintf (stderr, " proto = %d, nargs = %d\n",
2425	       cum->prototype, cum->nargs_prototype);
2426    }
2427}
2428
2429/* If defined, a C expression which determines whether, and in which
2430   direction, to pad out an argument with extra space.  The value
2431   should be of type `enum direction': either `upward' to pad above
2432   the argument, `downward' to pad below, or `none' to inhibit
2433   padding.
2434
2435   For the AIX ABI structs are always stored left shifted in their
2436   argument slot.  */
2437
2438enum direction
2439function_arg_padding (mode, type)
2440     enum machine_mode mode;
2441     tree type;
2442{
2443  if (type != 0 && AGGREGATE_TYPE_P (type))
2444    return upward;
2445
2446  /* This is the default definition.  */
2447  return (! BYTES_BIG_ENDIAN
2448          ? upward
2449          : ((mode == BLKmode
2450              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2451                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2452              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2453             ? downward : upward));
2454}
2455
2456/* If defined, a C expression that gives the alignment boundary, in bits,
2457   of an argument with the specified mode and type.  If it is not defined,
2458   PARM_BOUNDARY is used for all arguments.
2459
2460   V.4 wants long longs to be double word aligned.  */
2461
2462int
2463function_arg_boundary (mode, type)
2464     enum machine_mode mode;
2465     tree type ATTRIBUTE_UNUSED;
2466{
2467  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2468    return 64;
2469  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2470    return 128;
2471  else
2472    return PARM_BOUNDARY;
2473}
2474
2475/* Update the data in CUM to advance over an argument
2476   of mode MODE and data type TYPE.
2477   (TYPE is null for libcalls where that information may not be available.)  */
2478
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One fewer prototyped argument remains (may go negative once we
     pass the last prototyped arg).  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Vector args consume a vector register while any remain;
	 otherwise they go on the stack.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  /* FP args use FP registers until they run out, then stack
	     (doubles are doubleword-aligned on the stack).  */
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: we deliberately keep accumulating sysv_gregno even
	     past GP_ARG_MAX_REG; the overflow tells
	     expand_builtin_saveregs that we have started spilling
	     arguments to the stack.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: args occupy successive words, with 32-bit
	 doubleword-aligned args padded to an even word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args also consume an FP register in parallel.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2570
2571/* Determine where to put an argument to a function.
2572   Value is zero to push the argument on the stack,
2573   or a hard register in which to store the argument.
2574
2575   MODE is the argument's machine mode.
2576   TYPE is the data type of the argument (as a tree).
2577    This is null for libcalls where that information may
2578    not be available.
2579   CUM is a variable of type CUMULATIVE_ARGS which gives info about
2580    the preceding args and about the function being called.
2581   NAMED is nonzero if this argument is a named parameter
2582    (otherwise it is an extra parameter matching an ellipsis).
2583
2584   On RS/6000 the first eight words of non-FP are normally in registers
2585   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
2586   Under V.4, the first 8 FP args are in registers.
2587
2588   If this is floating-point and no prototype is specified, we use
2589   both an FP and integer register (or possibly FP reg and stack).  Library
2590   functions (when TYPE is zero) always have the proper types for args,
2591   so we can pass the FP value just in one register.  emit_library_function
2592   doesn't support PARALLEL anyway.  */
2593
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* fregno still at its minimum means no FP arg went into a
	     register, so tell the callee to clear the CR1 bit.  */
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named vector args go in vector registers; otherwise memory.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  /* Mirror the register-assignment logic in
	     function_arg_advance.  */
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin: compute the word offset, padding to an even word
	 when the arg needs doubleword alignment on 32-bit.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types always go in memory.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: pass in both the FP register and the
	     GP registers (or stack), so either convention works.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2710
2711/* For an arg passed partly in registers and partly in memory,
2712   this is the number of registers used.
2713   For args passed entirely in registers or entirely in memory, zero.  */
2714
2715int
2716function_arg_partial_nregs (cum, mode, type, named)
2717     CUMULATIVE_ARGS *cum;
2718     enum machine_mode mode;
2719     tree type;
2720     int named ATTRIBUTE_UNUSED;
2721{
2722  if (DEFAULT_ABI == ABI_V4)
2723    return 0;
2724
2725  if (USE_FP_FOR_ARG_P (*cum, mode, type)
2726      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2727    {
2728      if (cum->nargs_prototype >= 0)
2729	return 0;
2730    }
2731
2732  if (cum->words < GP_ARG_NUM_REG
2733      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2734    {
2735      int ret = GP_ARG_NUM_REG - cum->words;
2736      if (ret && TARGET_DEBUG_ARG)
2737	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2738
2739      return ret;
2740    }
2741
2742  return 0;
2743}
2744
2745/* A C expression that indicates when an argument must be passed by
2746   reference.  If nonzero for an argument, a copy of that argument is
2747   made in memory and a pointer to the argument is passed instead of
2748   the argument itself.  The pointer is passed in whatever way is
2749   appropriate for passing a pointer to that type.
2750
2751   Under V.4, structures and unions are passed by reference.  */
2752
2753int
2754function_arg_pass_by_reference (cum, mode, type, named)
2755     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2756     enum machine_mode mode ATTRIBUTE_UNUSED;
2757     tree type;
2758     int named ATTRIBUTE_UNUSED;
2759{
2760  if (DEFAULT_ABI == ABI_V4
2761      && ((type && AGGREGATE_TYPE_P (type))
2762	  || mode == TFmode))
2763    {
2764      if (TARGET_DEBUG_ARG)
2765	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2766
2767      return 1;
2768    }
2769
2770  return 0;
2771}
2772
/* Perform any actions needed for a function that is receiving a
2774   variable number of arguments.
2775
2776   CUM is as above.
2777
2778   MODE and TYPE are the mode and type of the current parameter.
2779
2780   PRETEND_SIZE is a variable that should be set to the amount of stack
2781   that must be pushed by the prolog to pretend that our caller pushed
2782   it.
2783
2784   Normally, this macro will push all remaining incoming registers on the
2785   stack and set PRETEND_SIZE to the length of the registers pushed.  */
2786
2787void
2788setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2789     CUMULATIVE_ARGS *cum;
2790     enum machine_mode mode;
2791     tree type;
2792     int *pretend_size;
2793     int no_rtl;
2794
2795{
2796  CUMULATIVE_ARGS next_cum;
2797  int reg_size = TARGET_32BIT ? 4 : 8;
2798  rtx save_area = NULL_RTX, mem;
2799  int first_reg_offset, set;
2800  tree fntype;
2801  int stdarg_p;
2802
2803  fntype = TREE_TYPE (current_function_decl);
2804  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2805	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2806		  != void_type_node));
2807
2808  /* For varargs, we do not want to skip the dummy va_dcl argument.
2809     For stdargs, we do want to skip the last named argument.  */
2810  next_cum = *cum;
2811  if (stdarg_p)
2812    function_arg_advance (&next_cum, mode, type, 1);
2813
2814  if (DEFAULT_ABI == ABI_V4)
2815    {
2816      /* Indicate to allocate space on the stack for varargs save area.  */
2817      /* ??? Does this really have to be located at a magic spot on the
2818	 stack, or can we allocate this with assign_stack_local instead.  */
2819      cfun->machine->sysv_varargs_p = 1;
2820      if (! no_rtl)
2821	save_area = plus_constant (virtual_stack_vars_rtx,
2822				   - RS6000_VARARGS_SIZE);
2823
2824      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2825    }
2826  else
2827    {
2828      first_reg_offset = next_cum.words;
2829      save_area = virtual_incoming_args_rtx;
2830      cfun->machine->sysv_varargs_p = 0;
2831
2832      if (MUST_PASS_IN_STACK (mode, type))
2833	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2834    }
2835
2836  set = get_varargs_alias_set ();
2837  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2838    {
2839      mem = gen_rtx_MEM (BLKmode,
2840		         plus_constant (save_area,
2841					first_reg_offset * reg_size)),
2842      set_mem_alias_set (mem, set);
2843      set_mem_align (mem, BITS_PER_WORD);
2844
2845      move_block_from_reg
2846	(GP_ARG_MIN_REG + first_reg_offset, mem,
2847	 GP_ARG_NUM_REG - first_reg_offset,
2848	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2849
2850      /* ??? Does ABI_V4 need this at all?  */
2851      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2852    }
2853
2854  /* Save FP registers if needed.  */
2855  if (DEFAULT_ABI == ABI_V4
2856      && TARGET_HARD_FLOAT && ! no_rtl
2857      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2858    {
2859      int fregno = next_cum.fregno;
2860      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2861      rtx lab = gen_label_rtx ();
2862      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2863
2864      emit_jump_insn (gen_rtx_SET (VOIDmode,
2865				   pc_rtx,
2866				   gen_rtx_IF_THEN_ELSE (VOIDmode,
2867					    gen_rtx_NE (VOIDmode, cr1,
2868						        const0_rtx),
2869					    gen_rtx_LABEL_REF (VOIDmode, lab),
2870					    pc_rtx)));
2871
2872      while (fregno <= FP_ARG_V4_MAX_REG)
2873	{
2874	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2875          set_mem_alias_set (mem, set);
2876	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2877	  fregno++;
2878	  off += 8;
2879	}
2880
2881      emit_label (lab);
2882    }
2883}
2884
2885/* Create the va_list data type.  */
2886
tree
rs6000_build_va_list ()
{
  /* Build the type used for __builtin_va_list: a plain pointer for
     AIX-style ABIs, or the four-field V.4 record (gpr/fpr counters
     plus overflow and register-save-area pointers).  */
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  record = make_lang_type (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* gpr/fpr count how many of each register class have been used.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields onto the record before laying it out.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
2926
2927/* Implement va_start.  */
2928
void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* The four fields of the V4 __va_list_tag record, in the layout
     order established by rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* va_list_type_node is a one-element array of the record, so the
     valist argument points at the record; dereference it and build
     COMPONENT_REFs for each field.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the gpr/fpr counters to the number of registers the
     named arguments consumed.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  It sits RS6000_VARARGS_SIZE bytes
     below the frame's stack-vars base.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2998
2999/* Implement va_arg.  */
3000
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      HOST_WIDE_INT align, rounded_size;
      enum machine_mode mode;
      tree addr_tree;

      /* Compute the rounded size of the type.  */
      align = PARM_BOUNDARY / BITS_PER_UNIT;
      rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
		      * align);

      addr_tree = valist;

      mode = TYPE_MODE (type);
      if (mode != BLKmode)
	{
	  HOST_WIDE_INT adj;
	  adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
	  if (rounded_size > align)
	    adj = rounded_size;

	  /* Skip past the slot padding so a right-aligned small value
	     is read from the tail of its slot.  */
	  addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			     build_int_2 (rounded_size - adj, 0));
	}

      addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr_rtx = copy_to_reg (addr_rtx);

      /* Compute new value for AP.  */
      t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
		 build (PLUS_EXPR, TREE_TYPE (valist), valist,
			build_int_2 (rounded_size, 0)));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      return addr_rtx;
    }

  /* V4 ABI: fetch the four fields of the __va_list_tag record
     (see rs6000_build_va_list).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* size in bytes, rsize in words.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Select which register counter to consult, the offset and scale
     into the register save area, and how many counter slots the
     argument consumes.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = rsize = UNITS_PER_WORD;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      /* FP save slots start after the 8 GPR words (8*4 bytes).  */
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* If fewer than n_reg registers remain (counter >= 8 - n_reg + 1),
     branch to the overflow-area path at lab_false.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
			   GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
			   lab_false);

  /* Long long is aligned in the registers.  */
  if (n_reg > 1)
    {
      /* Round the counter up to an even register pair:
	 reg += reg & (n_reg - 1).  */
      u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg - 1, 0));
      u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
      u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
      TREE_SIDE_EFFECTS (u) = 1;
      expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  /* addr = sav + sav_ofs + (reg++ * sav_scale), bumping the counter
     past this argument as a side effect.  */
  u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  u = build1 (CONVERT_EXPR, integer_type_node, u);
  TREE_SIDE_EFFECTS (u) = 1;

  u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.  */
  if (n_reg > 1)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      /* Round the overflow pointer up to an 8-byte boundary.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance ovf past the argument just fetched.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* Pass-by-reference arguments: addr_rtx currently holds the address
     of the pointer; load through it once.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3182
3183/* Builtins.  */
3184
/* Register builtin NAME with type TYPE and function code CODE, but
   only when the MASK bits are enabled in target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
do {									\
  if ((MASK) & target_flags)						\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);	\
} while (0)
3190
/* Describes one builtin: the target_flags bits that enable it, the
   insn pattern that implements it, its user-visible name, and its
   rs6000 function code.  */
struct builtin_description
{
  const unsigned int mask;		/* target_flags bits required.  */
  const enum insn_code icode;		/* Insn pattern to expand to.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum rs6000_builtins code;	/* DECL_FUNCTION_CODE value.  */
};
3198
3199/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3200
/* Each entry maps one three-operand AltiVec builtin to its insn pattern;
   expanded via altivec_expand_ternop_builtin.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3227
3228/* DST operations: void foo (void *, const int, const char).  */
3229
/* Data-stream touch builtins; expanded specially (three operands:
   address, stride/count word, tag).  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3237
3238/* Simple binary operations: VECc = foo (VECa, VECb).  */
3239
3240static const struct builtin_description bdesc_2arg[] =
3241{
3242  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3243  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3244  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3245  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3246  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3247  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3248  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3249  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3250  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3251  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3252  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3253  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3254  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3255  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3256  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3257  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3258  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3259  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3260  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3261  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3262  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3263  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3264  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3265  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3266  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3267  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3268  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3269  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3270  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3271  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3272  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3273  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3274  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3275  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3276  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3277  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3278  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3279  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3280  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3281  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3282  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3283  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3284  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3285  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3286  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3287  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3288  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3289  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3290  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3291  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3292  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3293  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3294  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3295  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3296  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3297  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3298  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3299  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3300  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3301  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3302  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3303  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3304  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3305  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3306  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3307  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3308  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3309  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3310  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3311  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3312  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3313  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3314  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3315  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3316  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3317  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3318  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3319  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3320  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3321  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3322  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3323  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3324  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3325  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3326  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3327  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3328  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3329  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3330  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3331  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3332  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3333  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3334  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3335  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3336  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3337  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3338  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3339  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3340  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3341  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3342  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3343  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3344  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3345  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3346  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3347  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3348  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3349  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3350  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3351  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3352  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3353  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3354  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3355  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp_p, "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3356  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp_p, "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3357  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb_p, "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3358  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh_p, "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3359  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw_p, "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3360  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp_p, "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3361  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp_p, "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3362  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb_p, "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3363  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh_p, "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3364  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw_p, "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3365  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub_p, "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
3366  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh_p, "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3367  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw_p, "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3368};
3369
3370/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3371   foo (VECa).  */
3372
/* Each entry maps one one-operand AltiVec builtin (vector or 5-bit
   literal argument) to its insn pattern; expanded via
   altivec_expand_unop_builtin.  */
static const struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3393
3394static rtx
3395altivec_expand_unop_builtin (icode, arglist, target)
3396     enum insn_code icode;
3397     tree arglist;
3398     rtx target;
3399{
3400  rtx pat;
3401  tree arg0 = TREE_VALUE (arglist);
3402  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3403  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3404  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3405
3406  /* If we got invalid arguments bail out before generating bad rtl.  */
3407  if (arg0 == error_mark_node)
3408    return NULL_RTX;
3409
3410  if (target == 0
3411      || GET_MODE (target) != tmode
3412      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3413    target = gen_reg_rtx (tmode);
3414
3415  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3416    op0 = copy_to_mode_reg (mode0, op0);
3417
3418  pat = GEN_FCN (icode) (target, op0);
3419  if (! pat)
3420    return 0;
3421  emit_insn (pat);
3422
3423  return target;
3424}
3425static rtx
3426altivec_expand_binop_builtin (icode, arglist, target)
3427     enum insn_code icode;
3428     tree arglist;
3429     rtx target;
3430{
3431  rtx pat;
3432  tree arg0 = TREE_VALUE (arglist);
3433  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3434  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3435  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3436  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3437  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3438  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3439
3440  /* If we got invalid arguments bail out before generating bad rtl.  */
3441  if (arg0 == error_mark_node || arg1 == error_mark_node)
3442    return NULL_RTX;
3443
3444  if (target == 0
3445      || GET_MODE (target) != tmode
3446      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3447    target = gen_reg_rtx (tmode);
3448
3449  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3450    op0 = copy_to_mode_reg (mode0, op0);
3451  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3452    op1 = copy_to_mode_reg (mode1, op1);
3453
3454  pat = GEN_FCN (icode) (target, op0, op1);
3455  if (! pat)
3456    return 0;
3457  emit_insn (pat);
3458
3459  return target;
3460}
3461
3462static rtx
3463altivec_expand_stv_builtin (icode, arglist)
3464     enum insn_code icode;
3465     tree arglist;
3466{
3467  tree arg0 = TREE_VALUE (arglist);
3468  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3469  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3470  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3471  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3472  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3473  rtx pat;
3474  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3475  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3476  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3477
3478  /* Invalid arguments.  Bail before doing anything stoopid!  */
3479  if (arg0 == error_mark_node
3480      || arg1 == error_mark_node
3481      || arg2 == error_mark_node)
3482    return NULL_RTX;
3483
3484  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3485    op0 = copy_to_mode_reg (mode2, op0);
3486  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3487    op1 = copy_to_mode_reg (mode0, op1);
3488  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3489    op2 = copy_to_mode_reg (mode1, op2);
3490
3491  pat = GEN_FCN (icode) (op1, op2, op0);
3492  if (pat)
3493    emit_insn (pat);
3494  return NULL_RTX;
3495}
3496
3497static rtx
3498altivec_expand_ternop_builtin (icode, arglist, target)
3499     enum insn_code icode;
3500     tree arglist;
3501     rtx target;
3502{
3503  rtx pat;
3504  tree arg0 = TREE_VALUE (arglist);
3505  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3506  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3507  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3508  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3509  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3510  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3511  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3512  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3513  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3514
3515  /* If we got invalid arguments bail out before generating bad rtl.  */
3516  if (arg0 == error_mark_node
3517      || arg1 == error_mark_node
3518      || arg2 == error_mark_node)
3519    return NULL_RTX;
3520
3521  if (target == 0
3522      || GET_MODE (target) != tmode
3523      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3524    target = gen_reg_rtx (tmode);
3525
3526  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3527    op0 = copy_to_mode_reg (mode0, op0);
3528  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3529    op1 = copy_to_mode_reg (mode1, op1);
3530  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3531    op2 = copy_to_mode_reg (mode2, op2);
3532
3533  pat = GEN_FCN (icode) (target, op0, op1, op2);
3534  if (! pat)
3535    return 0;
3536  emit_insn (pat);
3537
3538  return target;
3539}
3540static rtx
3541altivec_expand_builtin (exp, target)
3542     tree exp;
3543     rtx target;
3544{
3545  struct builtin_description *d;
3546  size_t i;
3547  enum insn_code icode;
3548  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3549  tree arglist = TREE_OPERAND (exp, 1);
3550  tree arg0, arg1, arg2;
3551  rtx op0, op1, op2, pat;
3552  enum machine_mode tmode, mode0, mode1, mode2;
3553  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3554
3555  switch (fcode)
3556    {
3557    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3558      icode = CODE_FOR_altivec_lvx_16qi;
3559      arg0 = TREE_VALUE (arglist);
3560      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3561      tmode = insn_data[icode].operand[0].mode;
3562      mode0 = insn_data[icode].operand[1].mode;
3563
3564      if (target == 0
3565	  || GET_MODE (target) != tmode
3566	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3567	target = gen_reg_rtx (tmode);
3568
3569      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3570	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3571
3572      pat = GEN_FCN (icode) (target, op0);
3573      if (! pat)
3574	return 0;
3575      emit_insn (pat);
3576      return target;
3577
3578    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3579      icode = CODE_FOR_altivec_lvx_8hi;
3580      arg0 = TREE_VALUE (arglist);
3581      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3582      tmode = insn_data[icode].operand[0].mode;
3583      mode0 = insn_data[icode].operand[1].mode;
3584
3585      if (target == 0
3586	  || GET_MODE (target) != tmode
3587	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3588	target = gen_reg_rtx (tmode);
3589
3590      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3591	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3592
3593      pat = GEN_FCN (icode) (target, op0);
3594      if (! pat)
3595	return 0;
3596      emit_insn (pat);
3597      return target;
3598
3599    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3600      icode = CODE_FOR_altivec_lvx_4si;
3601      arg0 = TREE_VALUE (arglist);
3602      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3603      tmode = insn_data[icode].operand[0].mode;
3604      mode0 = insn_data[icode].operand[1].mode;
3605
3606      if (target == 0
3607	  || GET_MODE (target) != tmode
3608	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3609	target = gen_reg_rtx (tmode);
3610
3611      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3612	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3613
3614      pat = GEN_FCN (icode) (target, op0);
3615      if (! pat)
3616	return 0;
3617      emit_insn (pat);
3618      return target;
3619
3620    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3621      icode = CODE_FOR_altivec_lvx_4sf;
3622      arg0 = TREE_VALUE (arglist);
3623      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3624      tmode = insn_data[icode].operand[0].mode;
3625      mode0 = insn_data[icode].operand[1].mode;
3626
3627      if (target == 0
3628	  || GET_MODE (target) != tmode
3629	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3630	target = gen_reg_rtx (tmode);
3631
3632      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3633	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3634
3635      pat = GEN_FCN (icode) (target, op0);
3636      if (! pat)
3637	return 0;
3638      emit_insn (pat);
3639      return target;
3640
3641    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3642      icode = CODE_FOR_altivec_stvx_16qi;
3643      arg0 = TREE_VALUE (arglist);
3644      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3645      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3646      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3647      mode0 = insn_data[icode].operand[0].mode;
3648      mode1 = insn_data[icode].operand[1].mode;
3649
3650      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3651	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3652      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3653	op1 = copy_to_mode_reg (mode1, op1);
3654
3655      pat = GEN_FCN (icode) (op0, op1);
3656      if (pat)
3657	emit_insn (pat);
3658      return NULL_RTX;
3659
3660    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3661      icode = CODE_FOR_altivec_stvx_8hi;
3662      arg0 = TREE_VALUE (arglist);
3663      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3664      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3665      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3666      mode0 = insn_data[icode].operand[0].mode;
3667      mode1 = insn_data[icode].operand[1].mode;
3668
3669      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3670	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3671      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3672	op1 = copy_to_mode_reg (mode1, op1);
3673
3674      pat = GEN_FCN (icode) (op0, op1);
3675      if (pat)
3676	emit_insn (pat);
3677      return NULL_RTX;
3678
3679    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3680      icode = CODE_FOR_altivec_stvx_4si;
3681      arg0 = TREE_VALUE (arglist);
3682      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3683      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3684      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3685      mode0 = insn_data[icode].operand[0].mode;
3686      mode1 = insn_data[icode].operand[1].mode;
3687
3688      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3689	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3690      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3691	op1 = copy_to_mode_reg (mode1, op1);
3692
3693      pat = GEN_FCN (icode) (op0, op1);
3694      if (pat)
3695	emit_insn (pat);
3696      return NULL_RTX;
3697
3698    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3699      icode = CODE_FOR_altivec_stvx_4sf;
3700      arg0 = TREE_VALUE (arglist);
3701      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3702      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3703      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3704      mode0 = insn_data[icode].operand[0].mode;
3705      mode1 = insn_data[icode].operand[1].mode;
3706
3707      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3708	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3709      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3710	op1 = copy_to_mode_reg (mode1, op1);
3711
3712      pat = GEN_FCN (icode) (op0, op1);
3713      if (pat)
3714	emit_insn (pat);
3715      return NULL_RTX;
3716
3717    case ALTIVEC_BUILTIN_STVX:
3718      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3719    case ALTIVEC_BUILTIN_STVEBX:
3720      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3721    case ALTIVEC_BUILTIN_STVEHX:
3722      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3723    case ALTIVEC_BUILTIN_STVEWX:
3724      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3725    case ALTIVEC_BUILTIN_STVXL:
3726      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3727
3728    case ALTIVEC_BUILTIN_MFVSCR:
3729      icode = CODE_FOR_altivec_mfvscr;
3730      tmode = insn_data[icode].operand[0].mode;
3731
3732      if (target == 0
3733	  || GET_MODE (target) != tmode
3734	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3735	target = gen_reg_rtx (tmode);
3736
3737      pat = GEN_FCN (icode) (target);
3738      if (! pat)
3739	return 0;
3740      emit_insn (pat);
3741      return target;
3742
3743    case ALTIVEC_BUILTIN_MTVSCR:
3744      icode = CODE_FOR_altivec_mtvscr;
3745      arg0 = TREE_VALUE (arglist);
3746      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3747      mode0 = insn_data[icode].operand[0].mode;
3748
3749      /* If we got invalid arguments bail out before generating bad rtl.  */
3750      if (arg0 == error_mark_node)
3751	return NULL_RTX;
3752
3753      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3754	op0 = copy_to_mode_reg (mode0, op0);
3755
3756      pat = GEN_FCN (icode) (op0);
3757      if (pat)
3758	emit_insn (pat);
3759      return NULL_RTX;
3760
3761    case ALTIVEC_BUILTIN_DSSALL:
3762      emit_insn (gen_altivec_dssall ());
3763      return NULL_RTX;
3764
3765    case ALTIVEC_BUILTIN_DSS:
3766      icode = CODE_FOR_altivec_dss;
3767      arg0 = TREE_VALUE (arglist);
3768      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3769      mode0 = insn_data[icode].operand[0].mode;
3770
3771      /* If we got invalid arguments bail out before generating bad rtl.  */
3772      if (arg0 == error_mark_node)
3773	return NULL_RTX;
3774
3775      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3776	op0 = copy_to_mode_reg (mode0, op0);
3777
3778      emit_insn (gen_altivec_dss (op0));
3779      return NULL_RTX;
3780    }
3781
3782  /* Handle DST variants.  */
3783  d = (struct builtin_description *) bdesc_dst;
3784  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
3785    if (d->code == fcode)
3786      {
3787	arg0 = TREE_VALUE (arglist);
3788	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3789	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3790	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3791	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3792	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3793	mode0 = insn_data[d->icode].operand[0].mode;
3794	mode1 = insn_data[d->icode].operand[1].mode;
3795	mode2 = insn_data[d->icode].operand[2].mode;
3796
3797	/* Invalid arguments, bail out before generating bad rtl.  */
3798	if (arg0 == error_mark_node
3799	    || arg1 == error_mark_node
3800	    || arg2 == error_mark_node)
3801	  return NULL_RTX;
3802
3803	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
3804	  op0 = copy_to_mode_reg (mode0, op0);
3805	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
3806	  op1 = copy_to_mode_reg (mode1, op1);
3807
3808	if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
3809	  {
3810	    error ("argument 3 of `%s' must be a 2-bit literal", d->name);
3811	    return NULL_RTX;
3812	  }
3813
3814	pat = GEN_FCN (d->icode) (op0, op1, op2);
3815	if (pat != 0)
3816	  emit_insn (pat);
3817
3818	return NULL_RTX;
3819      }
3820
3821  /* Handle simple unary operations.  */
3822  d = (struct builtin_description *) bdesc_1arg;
3823  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3824    if (d->code == fcode)
3825      return altivec_expand_unop_builtin (d->icode, arglist, target);
3826
3827  /* Handle simple binary operations.  */
3828  d = (struct builtin_description *) bdesc_2arg;
3829  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3830    if (d->code == fcode)
3831      return altivec_expand_binop_builtin (d->icode, arglist, target);
3832
3833  /* LV* are funky.  We initialized them differently.  */
3834  switch (fcode)
3835    {
3836    case ALTIVEC_BUILTIN_LVSL:
3837      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
3838					   arglist, target);
3839    case ALTIVEC_BUILTIN_LVSR:
3840      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
3841					   arglist, target);
3842    case ALTIVEC_BUILTIN_LVEBX:
3843      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
3844					   arglist, target);
3845    case ALTIVEC_BUILTIN_LVEHX:
3846      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
3847					   arglist, target);
3848    case ALTIVEC_BUILTIN_LVEWX:
3849      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
3850					   arglist, target);
3851    case ALTIVEC_BUILTIN_LVXL:
3852      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
3853					   arglist, target);
3854    case ALTIVEC_BUILTIN_LVX:
3855      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
3856					   arglist, target);
3857    default:
3858      break;
3859      /* Fall through.  */
3860    }
3861
3862  /* Handle simple ternary operations.  */
3863  d = (struct builtin_description *) bdesc_3arg;
3864  for (i = 0; i < sizeof  (bdesc_3arg) / sizeof *d; i++, d++)
3865    if (d->code == fcode)
3866      return altivec_expand_ternop_builtin (d->icode, arglist, target);
3867
3868  abort ();
3869  return NULL_RTX;
3870}
3871
3872/* Expand an expression EXP that calls a built-in function,
3873   with result going to TARGET if that's convenient
3874   (and in mode MODE if that's convenient).
3875   SUBTARGET may be used as the target for computing one of EXP's operands.
3876   IGNORE is nonzero if the value is to be ignored.  */
3877
3878static rtx
3879rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3880     tree exp;
3881     rtx target;
3882     rtx subtarget ATTRIBUTE_UNUSED;
3883     enum machine_mode mode ATTRIBUTE_UNUSED;
3884     int ignore ATTRIBUTE_UNUSED;
3885{
3886  if (TARGET_ALTIVEC)
3887    return altivec_expand_builtin (exp, target);
3888
3889  abort ();
3890}
3891
3892static void
3893rs6000_init_builtins ()
3894{
3895  if (TARGET_ALTIVEC)
3896    altivec_init_builtins ();
3897}
3898
3899static void
3900altivec_init_builtins (void)
3901{
3902  struct builtin_description * d;
3903  size_t i;
3904
3905  tree endlink = void_list_node;
3906
3907  tree pint_type_node = build_pointer_type (integer_type_node);
3908  tree pvoid_type_node = build_pointer_type (void_type_node);
3909  tree pshort_type_node = build_pointer_type (short_integer_type_node);
3910  tree pchar_type_node = build_pointer_type (char_type_node);
3911  tree pfloat_type_node = build_pointer_type (float_type_node);
3912
3913  tree v4sf_ftype_v4sf_v4sf_v16qi
3914    = build_function_type (V4SF_type_node,
3915			   tree_cons (NULL_TREE, V4SF_type_node,
3916				      tree_cons (NULL_TREE, V4SF_type_node,
3917						 tree_cons (NULL_TREE,
3918							    V16QI_type_node,
3919							    endlink))));
3920  tree v4si_ftype_v4si_v4si_v16qi
3921    = build_function_type (V4SI_type_node,
3922			   tree_cons (NULL_TREE, V4SI_type_node,
3923				      tree_cons (NULL_TREE, V4SI_type_node,
3924						 tree_cons (NULL_TREE,
3925							    V16QI_type_node,
3926							    endlink))));
3927  tree v8hi_ftype_v8hi_v8hi_v16qi
3928    = build_function_type (V8HI_type_node,
3929			   tree_cons (NULL_TREE, V8HI_type_node,
3930				      tree_cons (NULL_TREE, V8HI_type_node,
3931						 tree_cons (NULL_TREE,
3932							    V16QI_type_node,
3933							    endlink))));
3934  tree v16qi_ftype_v16qi_v16qi_v16qi
3935    = build_function_type (V16QI_type_node,
3936			   tree_cons (NULL_TREE, V16QI_type_node,
3937				      tree_cons (NULL_TREE, V16QI_type_node,
3938						 tree_cons (NULL_TREE,
3939							    V16QI_type_node,
3940							    endlink))));
3941
3942  /* V4SI foo (char).  */
3943  tree v4si_ftype_char
3944    = build_function_type (V4SI_type_node,
3945		           tree_cons (NULL_TREE, char_type_node, endlink));
3946
3947  /* V8HI foo (char).  */
3948  tree v8hi_ftype_char
3949    = build_function_type (V8HI_type_node,
3950		           tree_cons (NULL_TREE, char_type_node, endlink));
3951
3952  /* V16QI foo (char).  */
3953  tree v16qi_ftype_char
3954    = build_function_type (V16QI_type_node,
3955		           tree_cons (NULL_TREE, char_type_node, endlink));
3956  /* V4SF foo (V4SF).  */
3957  tree v4sf_ftype_v4sf
3958    = build_function_type (V4SF_type_node,
3959			   tree_cons (NULL_TREE, V4SF_type_node, endlink));
3960
3961  /* V4SI foo (int *).  */
3962  tree v4si_ftype_pint
3963    = build_function_type (V4SI_type_node,
3964			   tree_cons (NULL_TREE, pint_type_node, endlink));
3965  /* V8HI foo (short *).  */
3966  tree v8hi_ftype_pshort
3967    = build_function_type (V8HI_type_node,
3968			   tree_cons (NULL_TREE, pshort_type_node, endlink));
3969  /* V16QI foo (char *).  */
3970  tree v16qi_ftype_pchar
3971    = build_function_type (V16QI_type_node,
3972			   tree_cons (NULL_TREE, pchar_type_node, endlink));
3973  /* V4SF foo (float *).  */
3974  tree v4sf_ftype_pfloat
3975    = build_function_type (V4SF_type_node,
3976			   tree_cons (NULL_TREE, pfloat_type_node, endlink));
3977
3978  /* V8HI foo (V16QI).  */
3979  tree v8hi_ftype_v16qi
3980    = build_function_type (V8HI_type_node,
3981			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
3982
3983  /* void foo (void *, int, char/literal).  */
3984  tree void_ftype_pvoid_int_char
3985    = build_function_type (void_type_node,
3986			   tree_cons (NULL_TREE, pvoid_type_node,
3987				      tree_cons (NULL_TREE, integer_type_node,
3988						 tree_cons (NULL_TREE,
3989							    char_type_node,
3990							    endlink))));
3991
3992  /* void foo (int *, V4SI).  */
3993  tree void_ftype_pint_v4si
3994    = build_function_type (void_type_node,
3995			   tree_cons (NULL_TREE, pint_type_node,
3996				      tree_cons (NULL_TREE, V4SI_type_node,
3997						 endlink)));
3998  /* void foo (short *, V8HI).  */
3999  tree void_ftype_pshort_v8hi
4000    = build_function_type (void_type_node,
4001			   tree_cons (NULL_TREE, pshort_type_node,
4002				      tree_cons (NULL_TREE, V8HI_type_node,
4003						 endlink)));
4004  /* void foo (char *, V16QI).  */
4005  tree void_ftype_pchar_v16qi
4006    = build_function_type (void_type_node,
4007			   tree_cons (NULL_TREE, pchar_type_node,
4008				      tree_cons (NULL_TREE, V16QI_type_node,
4009						 endlink)));
4010  /* void foo (float *, V4SF).  */
4011  tree void_ftype_pfloat_v4sf
4012    = build_function_type (void_type_node,
4013			   tree_cons (NULL_TREE, pfloat_type_node,
4014				      tree_cons (NULL_TREE, V4SF_type_node,
4015						 endlink)));
4016
4017  /* void foo (V4SI).  */
4018  tree void_ftype_v4si
4019    = build_function_type (void_type_node,
4020			   tree_cons (NULL_TREE, V4SI_type_node,
4021				      endlink));
4022
4023  /* void foo (vint, int, void *).  */
4024  tree void_ftype_v4si_int_pvoid
4025    = build_function_type (void_type_node,
4026			   tree_cons (NULL_TREE, V4SI_type_node,
4027				      tree_cons (NULL_TREE, integer_type_node,
4028						 tree_cons (NULL_TREE,
4029							    pvoid_type_node,
4030							    endlink))));
4031
4032  /* void foo (vchar, int, void *).  */
4033  tree void_ftype_v16qi_int_pvoid
4034    = build_function_type (void_type_node,
4035			   tree_cons (NULL_TREE, V16QI_type_node,
4036				      tree_cons (NULL_TREE, integer_type_node,
4037						 tree_cons (NULL_TREE,
4038							    pvoid_type_node,
4039							    endlink))));
4040
4041  /* void foo (vshort, int, void *).  */
4042  tree void_ftype_v8hi_int_pvoid
4043    = build_function_type (void_type_node,
4044			   tree_cons (NULL_TREE, V8HI_type_node,
4045				      tree_cons (NULL_TREE, integer_type_node,
4046						 tree_cons (NULL_TREE,
4047							    pvoid_type_node,
4048							    endlink))));
4049
4050  /* void foo (char).  */
4051  tree void_ftype_qi
4052    = build_function_type (void_type_node,
4053			   tree_cons (NULL_TREE, char_type_node,
4054				      endlink));
4055
4056  /* void foo (void).  */
4057  tree void_ftype_void
4058    = build_function_type (void_type_node,
4059			   tree_cons (NULL_TREE, void_type_node,
4060				      endlink));
4061
4062  /* vshort foo (void).  */
4063  tree v8hi_ftype_void
4064    = build_function_type (V8HI_type_node,
4065			   tree_cons (NULL_TREE, void_type_node,
4066				      endlink));
4067
4068  tree v4si_ftype_v4si_v4si
4069    = build_function_type (V4SI_type_node,
4070			   tree_cons (NULL_TREE, V4SI_type_node,
4071				      tree_cons (NULL_TREE, V4SI_type_node,
4072						 endlink)));
4073
4074  /* These are for the unsigned 5 bit literals.  */
4075
4076  tree v4sf_ftype_v4si_char
4077    = build_function_type (V4SF_type_node,
4078			   tree_cons (NULL_TREE, V4SI_type_node,
4079				      tree_cons (NULL_TREE, char_type_node,
4080						 endlink)));
4081  tree v4si_ftype_v4sf_char
4082    = build_function_type (V4SI_type_node,
4083			   tree_cons (NULL_TREE, V4SF_type_node,
4084				      tree_cons (NULL_TREE, char_type_node,
4085						 endlink)));
4086  tree v4si_ftype_v4si_char
4087    = build_function_type (V4SI_type_node,
4088			   tree_cons (NULL_TREE, V4SI_type_node,
4089				      tree_cons (NULL_TREE, char_type_node,
4090						 endlink)));
4091  tree v8hi_ftype_v8hi_char
4092    = build_function_type (V8HI_type_node,
4093			   tree_cons (NULL_TREE, V8HI_type_node,
4094				      tree_cons (NULL_TREE, char_type_node,
4095						 endlink)));
4096  tree v16qi_ftype_v16qi_char
4097    = build_function_type (V16QI_type_node,
4098			   tree_cons (NULL_TREE, V16QI_type_node,
4099				      tree_cons (NULL_TREE, char_type_node,
4100						 endlink)));
4101
4102  /* These are for the unsigned 4 bit literals.  */
4103
4104  tree v16qi_ftype_v16qi_v16qi_char
4105    = build_function_type (V16QI_type_node,
4106			   tree_cons (NULL_TREE, V16QI_type_node,
4107				      tree_cons (NULL_TREE, V16QI_type_node,
4108						 tree_cons (NULL_TREE,
4109							    char_type_node,
4110							    endlink))));
4111
4112  tree v8hi_ftype_v8hi_v8hi_char
4113    = build_function_type (V8HI_type_node,
4114			   tree_cons (NULL_TREE, V8HI_type_node,
4115				      tree_cons (NULL_TREE, V8HI_type_node,
4116						 tree_cons (NULL_TREE,
4117							    char_type_node,
4118							    endlink))));
4119
4120  tree v4si_ftype_v4si_v4si_char
4121    = build_function_type (V4SI_type_node,
4122			   tree_cons (NULL_TREE, V4SI_type_node,
4123				      tree_cons (NULL_TREE, V4SI_type_node,
4124						 tree_cons (NULL_TREE,
4125							    char_type_node,
4126							    endlink))));
4127
4128  tree v4sf_ftype_v4sf_v4sf_char
4129    = build_function_type (V4SF_type_node,
4130			   tree_cons (NULL_TREE, V4SF_type_node,
4131				      tree_cons (NULL_TREE, V4SF_type_node,
4132						 tree_cons (NULL_TREE,
4133							    char_type_node,
4134							    endlink))));
4135
4136  /* End of 4 bit literals.  */
4137
4138  tree v4sf_ftype_v4sf_v4sf
4139    = build_function_type (V4SF_type_node,
4140			   tree_cons (NULL_TREE, V4SF_type_node,
4141				      tree_cons (NULL_TREE, V4SF_type_node,
4142						 endlink)));
4143  tree v4sf_ftype_v4sf_v4sf_v4si
4144    = build_function_type (V4SF_type_node,
4145			   tree_cons (NULL_TREE, V4SF_type_node,
4146				      tree_cons (NULL_TREE, V4SF_type_node,
4147						 tree_cons (NULL_TREE,
4148							    V4SI_type_node,
4149							    endlink))));
4150  tree v4sf_ftype_v4sf_v4sf_v4sf
4151    = build_function_type (V4SF_type_node,
4152			   tree_cons (NULL_TREE, V4SF_type_node,
4153				      tree_cons (NULL_TREE, V4SF_type_node,
4154						 tree_cons (NULL_TREE,
4155							    V4SF_type_node,
4156							    endlink))));
4157  tree v4si_ftype_v4si_v4si_v4si
4158    = build_function_type (V4SI_type_node,
4159			   tree_cons (NULL_TREE, V4SI_type_node,
4160				      tree_cons (NULL_TREE, V4SI_type_node,
4161						 tree_cons (NULL_TREE,
4162							    V4SI_type_node,
4163							    endlink))));
4164
4165  tree v8hi_ftype_v8hi_v8hi
4166    = build_function_type (V8HI_type_node,
4167			   tree_cons (NULL_TREE, V8HI_type_node,
4168				      tree_cons (NULL_TREE, V8HI_type_node,
4169						 endlink)));
4170  tree v8hi_ftype_v8hi_v8hi_v8hi
4171    = build_function_type (V8HI_type_node,
4172			   tree_cons (NULL_TREE, V8HI_type_node,
4173				      tree_cons (NULL_TREE, V8HI_type_node,
4174						 tree_cons (NULL_TREE,
4175							    V8HI_type_node,
4176							    endlink))));
4177 tree v4si_ftype_v8hi_v8hi_v4si
4178    = build_function_type (V4SI_type_node,
4179			   tree_cons (NULL_TREE, V8HI_type_node,
4180				      tree_cons (NULL_TREE, V8HI_type_node,
4181						 tree_cons (NULL_TREE,
4182							    V4SI_type_node,
4183							    endlink))));
4184 tree v4si_ftype_v16qi_v16qi_v4si
4185    = build_function_type (V4SI_type_node,
4186			   tree_cons (NULL_TREE, V16QI_type_node,
4187				      tree_cons (NULL_TREE, V16QI_type_node,
4188						 tree_cons (NULL_TREE,
4189							    V4SI_type_node,
4190							    endlink))));
4191
4192  tree v16qi_ftype_v16qi_v16qi
4193    = build_function_type (V16QI_type_node,
4194			   tree_cons (NULL_TREE, V16QI_type_node,
4195				      tree_cons (NULL_TREE, V16QI_type_node,
4196						 endlink)));
4197
4198  tree v4si_ftype_v4sf_v4sf
4199    = build_function_type (V4SI_type_node,
4200			   tree_cons (NULL_TREE, V4SF_type_node,
4201				      tree_cons (NULL_TREE, V4SF_type_node,
4202						 endlink)));
4203
4204  tree v8hi_ftype_v16qi_v16qi
4205    = build_function_type (V8HI_type_node,
4206			   tree_cons (NULL_TREE, V16QI_type_node,
4207				      tree_cons (NULL_TREE, V16QI_type_node,
4208						 endlink)));
4209
4210  tree v4si_ftype_v8hi_v8hi
4211    = build_function_type (V4SI_type_node,
4212			   tree_cons (NULL_TREE, V8HI_type_node,
4213				      tree_cons (NULL_TREE, V8HI_type_node,
4214						 endlink)));
4215
4216  tree v8hi_ftype_v4si_v4si
4217    = build_function_type (V8HI_type_node,
4218			   tree_cons (NULL_TREE, V4SI_type_node,
4219				      tree_cons (NULL_TREE, V4SI_type_node,
4220						 endlink)));
4221
4222  tree v16qi_ftype_v8hi_v8hi
4223    = build_function_type (V16QI_type_node,
4224			   tree_cons (NULL_TREE, V8HI_type_node,
4225				      tree_cons (NULL_TREE, V8HI_type_node,
4226						 endlink)));
4227
4228  tree v4si_ftype_v16qi_v4si
4229    = build_function_type (V4SI_type_node,
4230			   tree_cons (NULL_TREE, V16QI_type_node,
4231				      tree_cons (NULL_TREE, V4SI_type_node,
4232						 endlink)));
4233
4234  tree v4si_ftype_v16qi_v16qi
4235    = build_function_type (V4SI_type_node,
4236			   tree_cons (NULL_TREE, V16QI_type_node,
4237				      tree_cons (NULL_TREE, V16QI_type_node,
4238						 endlink)));
4239
4240  tree v4si_ftype_v8hi_v4si
4241    = build_function_type (V4SI_type_node,
4242			   tree_cons (NULL_TREE, V8HI_type_node,
4243				      tree_cons (NULL_TREE, V4SI_type_node,
4244						 endlink)));
4245
4246  tree v4si_ftype_v8hi
4247    = build_function_type (V4SI_type_node,
4248			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4249
4250  tree int_ftype_v4si_v4si
4251    = build_function_type (integer_type_node,
4252			   tree_cons (NULL_TREE, V4SI_type_node,
4253				      tree_cons (NULL_TREE, V4SI_type_node,
4254						 endlink)));
4255
4256  tree int_ftype_v4sf_v4sf
4257    = build_function_type (integer_type_node,
4258			   tree_cons (NULL_TREE, V4SF_type_node,
4259				      tree_cons (NULL_TREE, V4SF_type_node,
4260						 endlink)));
4261
4262  tree int_ftype_v16qi_v16qi
4263    = build_function_type (integer_type_node,
4264			   tree_cons (NULL_TREE, V16QI_type_node,
4265				      tree_cons (NULL_TREE, V16QI_type_node,
4266						 endlink)));
4267
4268  tree v16qi_ftype_int_pvoid
4269    = build_function_type (V16QI_type_node,
4270			   tree_cons (NULL_TREE, integer_type_node,
4271				      tree_cons (NULL_TREE, pvoid_type_node,
4272						 endlink)));
4273
4274  tree v4si_ftype_int_pvoid
4275    = build_function_type (V4SI_type_node,
4276			   tree_cons (NULL_TREE, integer_type_node,
4277				      tree_cons (NULL_TREE, pvoid_type_node,
4278						 endlink)));
4279
4280  tree v8hi_ftype_int_pvoid
4281    = build_function_type (V8HI_type_node,
4282			   tree_cons (NULL_TREE, integer_type_node,
4283				      tree_cons (NULL_TREE, pvoid_type_node,
4284						 endlink)));
4285
4286  tree int_ftype_v8hi_v8hi
4287    = build_function_type (integer_type_node,
4288			   tree_cons (NULL_TREE, V8HI_type_node,
4289				      tree_cons (NULL_TREE, V8HI_type_node,
4290						 endlink)));
4291
4292  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4293  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4294  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4295  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4296  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4297  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4298  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4299  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4300  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4301  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4302  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4303  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4304  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4305  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4306  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4307  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4308  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4309  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4310  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4311  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4312  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4313  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4314  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4315  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4316
4317  /* Add the simple ternary operators.  */
4318  d = (struct builtin_description *) bdesc_3arg;
4319  for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4320    {
4321
4322      enum machine_mode mode0, mode1, mode2, mode3;
4323      tree type;
4324
4325      if (d->name == 0)
4326	continue;
4327
4328      mode0 = insn_data[d->icode].operand[0].mode;
4329      mode1 = insn_data[d->icode].operand[1].mode;
4330      mode2 = insn_data[d->icode].operand[2].mode;
4331      mode3 = insn_data[d->icode].operand[3].mode;
4332
4333      /* When all four are of the same mode.  */
4334      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4335	{
4336	  switch (mode0)
4337	    {
4338	    case V4SImode:
4339	      type = v4si_ftype_v4si_v4si_v4si;
4340	      break;
4341	    case V4SFmode:
4342	      type = v4sf_ftype_v4sf_v4sf_v4sf;
4343	      break;
4344	    case V8HImode:
4345	      type = v8hi_ftype_v8hi_v8hi_v8hi;
4346	      break;
4347	    case V16QImode:
4348	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4349	      break;
4350	    default:
4351	      abort();
4352	    }
4353	}
4354      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4355        {
4356	  switch (mode0)
4357	    {
4358	    case V4SImode:
4359	      type = v4si_ftype_v4si_v4si_v16qi;
4360	      break;
4361	    case V4SFmode:
4362	      type = v4sf_ftype_v4sf_v4sf_v16qi;
4363	      break;
4364	    case V8HImode:
4365	      type = v8hi_ftype_v8hi_v8hi_v16qi;
4366	      break;
4367	    case V16QImode:
4368	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4369	      break;
4370	    default:
4371	      abort();
4372	    }
4373	}
4374      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4375	       && mode3 == V4SImode)
4376	type = v4si_ftype_v16qi_v16qi_v4si;
4377      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4378	       && mode3 == V4SImode)
4379	type = v4si_ftype_v8hi_v8hi_v4si;
4380      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4381	       && mode3 == V4SImode)
4382	type = v4sf_ftype_v4sf_v4sf_v4si;
4383
4384      /* vchar, vchar, vchar, 4 bit literal.  */
4385      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4386	       && mode3 == QImode)
4387	type = v16qi_ftype_v16qi_v16qi_char;
4388
4389      /* vshort, vshort, vshort, 4 bit literal.  */
4390      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4391	       && mode3 == QImode)
4392	type = v8hi_ftype_v8hi_v8hi_char;
4393
4394      /* vint, vint, vint, 4 bit literal.  */
4395      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4396	       && mode3 == QImode)
4397	type = v4si_ftype_v4si_v4si_char;
4398
4399      /* vfloat, vfloat, vfloat, 4 bit literal.  */
4400      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4401	       && mode3 == QImode)
4402	type = v4sf_ftype_v4sf_v4sf_char;
4403
4404      else
4405	abort ();
4406
4407      def_builtin (d->mask, d->name, type, d->code);
4408    }
4409
4410  /* Add the DST variants.  */
4411  d = (struct builtin_description *) bdesc_dst;
4412  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4413    def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4414
4415  /* Add the simple binary operators.  */
4416  d = (struct builtin_description *) bdesc_2arg;
4417  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4418    {
4419      enum machine_mode mode0, mode1, mode2;
4420      tree type;
4421
4422      if (d->name == 0)
4423	continue;
4424
4425      mode0 = insn_data[d->icode].operand[0].mode;
4426      mode1 = insn_data[d->icode].operand[1].mode;
4427      mode2 = insn_data[d->icode].operand[2].mode;
4428
4429      /* When all three operands are of the same mode.  */
4430      if (mode0 == mode1 && mode1 == mode2)
4431	{
4432	  switch (mode0)
4433	    {
4434	    case V4SFmode:
4435	      type = v4sf_ftype_v4sf_v4sf;
4436	      break;
4437	    case V4SImode:
4438	      type = v4si_ftype_v4si_v4si;
4439	      break;
4440	    case V16QImode:
4441	      type = v16qi_ftype_v16qi_v16qi;
4442	      break;
4443	    case V8HImode:
4444	      type = v8hi_ftype_v8hi_v8hi;
4445	      break;
4446	    default:
4447	      abort ();
4448	    }
4449	}
4450
4451      /* A few other combos we really don't want to do manually.  */
4452
4453      /* vint, vfloat, vfloat.  */
4454      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4455	type = v4si_ftype_v4sf_v4sf;
4456
4457      /* vshort, vchar, vchar.  */
4458      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4459	type = v8hi_ftype_v16qi_v16qi;
4460
4461      /* vint, vshort, vshort.  */
4462      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4463	type = v4si_ftype_v8hi_v8hi;
4464
4465      /* vshort, vint, vint.  */
4466      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4467	type = v8hi_ftype_v4si_v4si;
4468
4469      /* vchar, vshort, vshort.  */
4470      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4471	type = v16qi_ftype_v8hi_v8hi;
4472
4473      /* vint, vchar, vint.  */
4474      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4475	type = v4si_ftype_v16qi_v4si;
4476
4477      /* vint, vchar, vchar.  */
4478      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4479	type = v4si_ftype_v16qi_v16qi;
4480
4481      /* vint, vshort, vint.  */
4482      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4483	type = v4si_ftype_v8hi_v4si;
4484
4485      /* vint, vint, 5 bit literal.  */
4486      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4487	type = v4si_ftype_v4si_char;
4488
4489      /* vshort, vshort, 5 bit literal.  */
4490      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4491	type = v8hi_ftype_v8hi_char;
4492
4493      /* vchar, vchar, 5 bit literal.  */
4494      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4495	type = v16qi_ftype_v16qi_char;
4496
4497      /* vfloat, vint, 5 bit literal.  */
4498      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4499	type = v4sf_ftype_v4si_char;
4500
4501      /* vint, vfloat, 5 bit literal.  */
4502      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4503	type = v4si_ftype_v4sf_char;
4504
4505      /* int, x, x.  */
4506      else if (mode0 == SImode)
4507	{
4508	  switch (mode1)
4509	    {
4510	    case V4SImode:
4511	      type = int_ftype_v4si_v4si;
4512	      break;
4513	    case V4SFmode:
4514	      type = int_ftype_v4sf_v4sf;
4515	      break;
4516	    case V16QImode:
4517	      type = int_ftype_v16qi_v16qi;
4518	      break;
4519	    case V8HImode:
4520	      type = int_ftype_v8hi_v8hi;
4521	      break;
4522	    default:
4523	      abort ();
4524	    }
4525	}
4526
4527      else
4528	abort ();
4529
4530      def_builtin (d->mask, d->name, type, d->code);
4531    }
4532
4533  /* Add the simple unary operators.  */
4534  d = (struct builtin_description *) bdesc_1arg;
4535  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4536    {
4537      enum machine_mode mode0, mode1;
4538      tree type;
4539
4540      if (d->name == 0)
4541	continue;
4542
4543      mode0 = insn_data[d->icode].operand[0].mode;
4544      mode1 = insn_data[d->icode].operand[1].mode;
4545
4546      if (mode0 == V4SImode && mode1 == QImode)
4547        type = v4si_ftype_char;
4548      else if (mode0 == V8HImode && mode1 == QImode)
4549        type = v8hi_ftype_char;
4550      else if (mode0 == V16QImode && mode1 == QImode)
4551        type = v16qi_ftype_char;
4552      else if (mode0 == V4SFmode && mode1 == V4SFmode)
4553	type = v4sf_ftype_v4sf;
4554      else if (mode0 == V8HImode && mode1 == V16QImode)
4555	type = v8hi_ftype_v16qi;
4556      else if (mode0 == V4SImode && mode1 == V8HImode)
4557	type = v4si_ftype_v8hi;
4558      else
4559	abort ();
4560
4561      def_builtin (d->mask, d->name, type, d->code);
4562    }
4563}
4564
4565
4566/* Generate a memory reference for expand_block_move, copying volatile,
4567   and other bits from an original memory reference.  */
4568
4569static rtx
4570expand_block_move_mem (mode, addr, orig_mem)
4571     enum machine_mode mode;
4572     rtx addr;
4573     rtx orig_mem;
4574{
4575  rtx mem = gen_rtx_MEM (mode, addr);
4576
4577  MEM_COPY_ATTRIBUTES (mem, orig_mem);
4578  return mem;
4579}
4580
4581/* Expand a block move operation, and return 1 if successful.  Return 0
4582   if we should let the compiler generate normal code.
4583
4584   operands[0] is the destination
4585   operands[1] is the source
4586   operands[2] is the length
4587   operands[3] is the alignment */
4588
4589#define MAX_MOVE_REG 4
4590
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int num_reg;
  int i;
  rtx src_reg;
  rtx dest_reg;
  rtx src_addr;
  rtx dest_addr;
  rtx tmp_reg;
  rtx stores[MAX_MOVE_REG];
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Move the address into scratch registers.  */
  dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
  src_reg  = copy_addr_to_reg (XEXP (orig_src,  0));

  /* Strategy: with string instructions, try progressively smaller
     multi-register string moves, falling back to single scalar
     load/store pairs for sufficiently aligned pieces.  Without them,
     emit scalar load/store pairs, buffering up to MAX_MOVE_REG stores
     so the loads can be scheduled ahead of the stores.  */
  if (TARGET_STRING)	/* string instructions are available */
    {
      for ( ; bytes > 0; bytes -= move_bytes)
	{
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      /* An 8-register string move; a length operand of 0 in the
		 pattern encodes a full 32-byte move.  */
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT ((move_bytes == 32)
						     ? 0 : move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (DImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (DImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (SImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (SImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (HImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (HImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (QImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (QImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }

	  /* If more remains, bump both address registers past the piece
	     just moved (the string-insn patterns read the base registers
	     directly, so they must be updated in place).  */
	  if (bytes > move_bytes)
	    {
	      if (! TARGET_POWERPC64)
		{
		  emit_insn (gen_addsi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_addsi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	      else
		{
		  emit_insn (gen_adddi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_adddi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	    }
	}
    }

  else			/* string instructions not available */
    {
      num_reg = offset = 0;
      for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
	{
	  /* Calculate the correct offset for src/dest */
	  if (offset == 0)
	    {
	      src_addr  = src_reg;
	      dest_addr = dest_reg;
	    }
	  else
	    {
	      src_addr = plus_constant (src_reg, offset);
	      dest_addr = plus_constant (dest_reg, offset);
	    }

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_insn (gen_movdi (tmp_reg,
				    expand_block_move_mem (DImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_insn (gen_movsi (tmp_reg,
				    expand_block_move_mem (SImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_insn (gen_movhi (tmp_reg,
				    expand_block_move_mem (HImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_insn (gen_movqi (tmp_reg,
				    expand_block_move_mem (QImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }

	  /* Flush the batched stores once the buffer fills, so loads and
	     stores stay interleaved in groups of MAX_MOVE_REG.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Emit any stores still pending in the buffer.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
4876
4877
4878/* Return 1 if OP is a load multiple operation.  It is known to be a
4879   PARALLEL and the first section will be tested.  */
4880
4881int
4882load_multiple_operation (op, mode)
4883     rtx op;
4884     enum machine_mode mode ATTRIBUTE_UNUSED;
4885{
4886  int count = XVECLEN (op, 0);
4887  unsigned int dest_regno;
4888  rtx src_addr;
4889  int i;
4890
4891  /* Perform a quick check so we don't blow up below.  */
4892  if (count <= 1
4893      || GET_CODE (XVECEXP (op, 0, 0)) != SET
4894      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4895      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4896    return 0;
4897
4898  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4899  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4900
4901  for (i = 1; i < count; i++)
4902    {
4903      rtx elt = XVECEXP (op, 0, i);
4904
4905      if (GET_CODE (elt) != SET
4906	  || GET_CODE (SET_DEST (elt)) != REG
4907	  || GET_MODE (SET_DEST (elt)) != SImode
4908	  || REGNO (SET_DEST (elt)) != dest_regno + i
4909	  || GET_CODE (SET_SRC (elt)) != MEM
4910	  || GET_MODE (SET_SRC (elt)) != SImode
4911	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4912	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4913	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4914	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
4915	return 0;
4916    }
4917
4918  return 1;
4919}
4920
4921/* Similar, but tests for store multiple.  Here, the second vector element
4922   is a CLOBBER.  It will be tested later.  */
4923
4924int
4925store_multiple_operation (op, mode)
4926     rtx op;
4927     enum machine_mode mode ATTRIBUTE_UNUSED;
4928{
4929  int count = XVECLEN (op, 0) - 1;
4930  unsigned int src_regno;
4931  rtx dest_addr;
4932  int i;
4933
4934  /* Perform a quick check so we don't blow up below.  */
4935  if (count <= 1
4936      || GET_CODE (XVECEXP (op, 0, 0)) != SET
4937      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4938      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4939    return 0;
4940
4941  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4942  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4943
4944  for (i = 1; i < count; i++)
4945    {
4946      rtx elt = XVECEXP (op, 0, i + 1);
4947
4948      if (GET_CODE (elt) != SET
4949	  || GET_CODE (SET_SRC (elt)) != REG
4950	  || GET_MODE (SET_SRC (elt)) != SImode
4951	  || REGNO (SET_SRC (elt)) != src_regno + i
4952	  || GET_CODE (SET_DEST (elt)) != MEM
4953	  || GET_MODE (SET_DEST (elt)) != SImode
4954	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4955	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4956	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4957	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
4958	return 0;
4959    }
4960
4961  return 1;
4962}
4963
4964/* Return 1 for a parallel vrsave operation.  */
4965
4966int
4967vrsave_operation (op, mode)
4968     rtx op;
4969     enum machine_mode mode ATTRIBUTE_UNUSED;
4970{
4971  int count = XVECLEN (op, 0);
4972  unsigned int dest_regno, src_regno;
4973  int i;
4974
4975  if (count <= 1
4976      || GET_CODE (XVECEXP (op, 0, 0)) != SET
4977      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4978      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
4979    return 0;
4980
4981  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4982  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4983
4984  if (dest_regno != VRSAVE_REGNO
4985      && src_regno != VRSAVE_REGNO)
4986    return 0;
4987
4988  for (i = 1; i < count; i++)
4989    {
4990      rtx elt = XVECEXP (op, 0, i);
4991
4992      if (GET_CODE (elt) != CLOBBER
4993	  && GET_CODE (elt) != SET)
4994	return 0;
4995    }
4996
4997  return 1;
4998}
4999
5000/* Return 1 for an PARALLEL suitable for mtcrf.  */
5001
5002int
5003mtcrf_operation (op, mode)
5004     rtx op;
5005     enum machine_mode mode ATTRIBUTE_UNUSED;
5006{
5007  int count = XVECLEN (op, 0);
5008  int i;
5009  rtx src_reg;
5010
5011  /* Perform a quick check so we don't blow up below.  */
5012  if (count < 1
5013      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5014      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5015      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5016    return 0;
5017  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5018
5019  if (GET_CODE (src_reg) != REG
5020      || GET_MODE (src_reg) != SImode
5021      || ! INT_REGNO_P (REGNO (src_reg)))
5022    return 0;
5023
5024  for (i = 0; i < count; i++)
5025    {
5026      rtx exp = XVECEXP (op, 0, i);
5027      rtx unspec;
5028      int maskval;
5029
5030      if (GET_CODE (exp) != SET
5031	  || GET_CODE (SET_DEST (exp)) != REG
5032	  || GET_MODE (SET_DEST (exp)) != CCmode
5033	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5034	return 0;
5035      unspec = SET_SRC (exp);
5036      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5037
5038      if (GET_CODE (unspec) != UNSPEC
5039	  || XINT (unspec, 1) != 20
5040	  || XVECLEN (unspec, 0) != 2
5041	  || XVECEXP (unspec, 0, 0) != src_reg
5042	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5043	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5044	return 0;
5045    }
5046  return 1;
5047}
5048
5049/* Return 1 for an PARALLEL suitable for lmw.  */
5050
5051int
5052lmw_operation (op, mode)
5053     rtx op;
5054     enum machine_mode mode ATTRIBUTE_UNUSED;
5055{
5056  int count = XVECLEN (op, 0);
5057  unsigned int dest_regno;
5058  rtx src_addr;
5059  unsigned int base_regno;
5060  HOST_WIDE_INT offset;
5061  int i;
5062
5063  /* Perform a quick check so we don't blow up below.  */
5064  if (count <= 1
5065      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5066      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5067      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5068    return 0;
5069
5070  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5071  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5072
5073  if (dest_regno > 31
5074      || count != 32 - (int) dest_regno)
5075    return 0;
5076
5077  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5078    {
5079      offset = 0;
5080      base_regno = REGNO (src_addr);
5081      if (base_regno == 0)
5082	return 0;
5083    }
5084  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5085    {
5086      offset = INTVAL (XEXP (src_addr, 1));
5087      base_regno = REGNO (XEXP (src_addr, 0));
5088    }
5089  else
5090    return 0;
5091
5092  for (i = 0; i < count; i++)
5093    {
5094      rtx elt = XVECEXP (op, 0, i);
5095      rtx newaddr;
5096      rtx addr_reg;
5097      HOST_WIDE_INT newoffset;
5098
5099      if (GET_CODE (elt) != SET
5100	  || GET_CODE (SET_DEST (elt)) != REG
5101	  || GET_MODE (SET_DEST (elt)) != SImode
5102	  || REGNO (SET_DEST (elt)) != dest_regno + i
5103	  || GET_CODE (SET_SRC (elt)) != MEM
5104	  || GET_MODE (SET_SRC (elt)) != SImode)
5105	return 0;
5106      newaddr = XEXP (SET_SRC (elt), 0);
5107      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5108	{
5109	  newoffset = 0;
5110	  addr_reg = newaddr;
5111	}
5112      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5113	{
5114	  addr_reg = XEXP (newaddr, 0);
5115	  newoffset = INTVAL (XEXP (newaddr, 1));
5116	}
5117      else
5118	return 0;
5119      if (REGNO (addr_reg) != base_regno
5120	  || newoffset != offset + 4 * i)
5121	return 0;
5122    }
5123
5124  return 1;
5125}
5126
5127/* Return 1 for an PARALLEL suitable for stmw.  */
5128
5129int
5130stmw_operation (op, mode)
5131     rtx op;
5132     enum machine_mode mode ATTRIBUTE_UNUSED;
5133{
5134  int count = XVECLEN (op, 0);
5135  unsigned int src_regno;
5136  rtx dest_addr;
5137  unsigned int base_regno;
5138  HOST_WIDE_INT offset;
5139  int i;
5140
5141  /* Perform a quick check so we don't blow up below.  */
5142  if (count <= 1
5143      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5144      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5145      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5146    return 0;
5147
5148  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5149  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5150
5151  if (src_regno > 31
5152      || count != 32 - (int) src_regno)
5153    return 0;
5154
5155  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5156    {
5157      offset = 0;
5158      base_regno = REGNO (dest_addr);
5159      if (base_regno == 0)
5160	return 0;
5161    }
5162  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5163    {
5164      offset = INTVAL (XEXP (dest_addr, 1));
5165      base_regno = REGNO (XEXP (dest_addr, 0));
5166    }
5167  else
5168    return 0;
5169
5170  for (i = 0; i < count; i++)
5171    {
5172      rtx elt = XVECEXP (op, 0, i);
5173      rtx newaddr;
5174      rtx addr_reg;
5175      HOST_WIDE_INT newoffset;
5176
5177      if (GET_CODE (elt) != SET
5178	  || GET_CODE (SET_SRC (elt)) != REG
5179	  || GET_MODE (SET_SRC (elt)) != SImode
5180	  || REGNO (SET_SRC (elt)) != src_regno + i
5181	  || GET_CODE (SET_DEST (elt)) != MEM
5182	  || GET_MODE (SET_DEST (elt)) != SImode)
5183	return 0;
5184      newaddr = XEXP (SET_DEST (elt), 0);
5185      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5186	{
5187	  newoffset = 0;
5188	  addr_reg = newaddr;
5189	}
5190      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5191	{
5192	  addr_reg = XEXP (newaddr, 0);
5193	  newoffset = INTVAL (XEXP (newaddr, 1));
5194	}
5195      else
5196	return 0;
5197      if (REGNO (addr_reg) != base_regno
5198	  || newoffset != offset + 4 * i)
5199	return 0;
5200    }
5201
5202  return 1;
5203}
5204
5205/* A validation routine: say whether CODE, a condition code, and MODE
5206   match.  The other alternatives either don't make sense or should
5207   never be generated.  */
5208
5209static void
5210validate_condition_mode (code, mode)
5211     enum rtx_code code;
5212     enum machine_mode mode;
5213{
5214  if (GET_RTX_CLASS (code) != '<'
5215      || GET_MODE_CLASS (mode) != MODE_CC)
5216    abort ();
5217
5218  /* These don't make sense.  */
5219  if ((code == GT || code == LT || code == GE || code == LE)
5220      && mode == CCUNSmode)
5221    abort ();
5222
5223  if ((code == GTU || code == LTU || code == GEU || code == LEU)
5224      && mode != CCUNSmode)
5225    abort ();
5226
5227  if (mode != CCFPmode
5228      && (code == ORDERED || code == UNORDERED
5229	  || code == UNEQ || code == LTGT
5230	  || code == UNGT || code == UNLT
5231	  || code == UNGE || code == UNLE))
5232    abort ();
5233
5234  /* These should never be generated except for
5235     flag_unsafe_math_optimizations.  */
5236  if (mode == CCFPmode
5237      && ! flag_unsafe_math_optimizations
5238      && (code == LE || code == GE
5239	  || code == UNEQ || code == LTGT
5240	  || code == UNGT || code == UNLT))
5241    abort ();
5242
5243  /* These are invalid; the information is not there.  */
5244  if (mode == CCEQmode
5245      && code != EQ && code != NE)
5246    abort ();
5247}
5248
5249/* Return 1 if OP is a comparison operation that is valid for a branch insn.
5250   We only check the opcode against the mode of the CC value here.  */
5251
5252int
5253branch_comparison_operator (op, mode)
5254     rtx op;
5255     enum machine_mode mode ATTRIBUTE_UNUSED;
5256{
5257  enum rtx_code code = GET_CODE (op);
5258  enum machine_mode cc_mode;
5259
5260  if (GET_RTX_CLASS (code) != '<')
5261    return 0;
5262
5263  cc_mode = GET_MODE (XEXP (op, 0));
5264  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5265    return 0;
5266
5267  validate_condition_mode (code, cc_mode);
5268
5269  return 1;
5270}
5271
5272/* Return 1 if OP is a comparison operation that is valid for a branch
5273   insn and which is true if the corresponding bit in the CC register
5274   is set.  */
5275
5276int
5277branch_positive_comparison_operator (op, mode)
5278     rtx op;
5279     enum machine_mode mode;
5280{
5281  enum rtx_code code;
5282
5283  if (! branch_comparison_operator (op, mode))
5284    return 0;
5285
5286  code = GET_CODE (op);
5287  return (code == EQ || code == LT || code == GT
5288	  || code == LTU || code == GTU
5289	  || code == UNORDERED);
5290}
5291
5292/* Return 1 if OP is a comparison operation that is valid for an scc insn.
5293   We check the opcode against the mode of the CC value and disallow EQ or
5294   NE comparisons for integers.  */
5295
5296int
5297scc_comparison_operator (op, mode)
5298     rtx op;
5299     enum machine_mode mode;
5300{
5301  enum rtx_code code = GET_CODE (op);
5302  enum machine_mode cc_mode;
5303
5304  if (GET_MODE (op) != mode && mode != VOIDmode)
5305    return 0;
5306
5307  if (GET_RTX_CLASS (code) != '<')
5308    return 0;
5309
5310  cc_mode = GET_MODE (XEXP (op, 0));
5311  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5312    return 0;
5313
5314  validate_condition_mode (code, cc_mode);
5315
5316  if (code == NE && cc_mode != CCFPmode)
5317    return 0;
5318
5319  return 1;
5320}
5321
5322int
5323trap_comparison_operator (op, mode)
5324    rtx op;
5325    enum machine_mode mode;
5326{
5327  if (mode != VOIDmode && mode != GET_MODE (op))
5328    return 0;
5329  return GET_RTX_CLASS (GET_CODE (op)) == '<';
5330}
5331
5332int
5333boolean_operator (op, mode)
5334    rtx op;
5335    enum machine_mode mode ATTRIBUTE_UNUSED;
5336{
5337  enum rtx_code code = GET_CODE (op);
5338  return (code == AND || code == IOR || code == XOR);
5339}
5340
5341int
5342boolean_or_operator (op, mode)
5343    rtx op;
5344    enum machine_mode mode ATTRIBUTE_UNUSED;
5345{
5346  enum rtx_code code = GET_CODE (op);
5347  return (code == IOR || code == XOR);
5348}
5349
5350int
5351min_max_operator (op, mode)
5352    rtx op;
5353    enum machine_mode mode ATTRIBUTE_UNUSED;
5354{
5355  enum rtx_code code = GET_CODE (op);
5356  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5357}
5358
5359/* Return 1 if ANDOP is a mask that has no bits on that are not in the
5360   mask required to convert the result of a rotate insn into a shift
5361   left insn of SHIFTOP bits.  Both are known to be CONST_INT.  */
5362
5363int
5364includes_lshift_p (shiftop, andop)
5365     rtx shiftop;
5366     rtx andop;
5367{
5368  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5369
5370  shift_mask <<= INTVAL (shiftop);
5371
5372  return (INTVAL (andop) & ~shift_mask) == 0;
5373}
5374
5375/* Similar, but for right shift.  */
5376
5377int
5378includes_rshift_p (shiftop, andop)
5379     rtx shiftop;
5380     rtx andop;
5381{
5382  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5383
5384  shift_mask >>= INTVAL (shiftop);
5385
5386  return (INTVAL (andop) & ~shift_mask) == 0;
5387}
5388
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* Reject all-0 and all-1 masks: rldic needs a real transition.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Two-word integer constant.  HIGH is only read on hosts where
	 HOST_BITS_PER_WIDE_INT < 64: every use below sits behind that
	 compile-time test (or a short-circuit on it), so the
	 "may be used uninitialized" warning on 64-bit hosts is
	 harmless.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-0 and all-1 64-bit masks, as in the CONST_INT case.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* Low word all zero: the mask lives entirely in the high
	     word, so the shift must reach into it (SHIFTOP >= 32,
	     enforced below) and the same LSB/transition checks are
	     applied to the high word alone.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Mask starts in the low word: its LSB must match the shift
	 mask exactly, as in the CONST_INT case.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert and strip the low run of (originally) zeros, then
	 require everything above the transition to be ones —
	 including the whole high word when the run of ones crosses
	 the 32-bit boundary.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5483
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Two-word integer constant (only reachable in this form on
	 narrow hosts; the high word is handled in the nested block).  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask lives entirely in the high word; apply the same
		 covered-LSB and all-ones-above checks there.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Mask starts in the low word, so the whole high word must
	     already be ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5555
5556/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5557   for lfq and stfq insns.
5558
5559   Note reg1 and reg2 *must* be hard registers.  To be sure we will
5560   abort if we are passed pseudo registers.  */
5561
5562int
5563registers_ok_for_quad_peep (reg1, reg2)
5564     rtx reg1, reg2;
5565{
5566  /* We might have been passed a SUBREG.  */
5567  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5568    return 0;
5569
5570  return (REGNO (reg1) == REGNO (reg2) - 1);
5571}
5572
5573/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5574   addr1 and addr2 must be in consecutive memory locations
5575   (addr2 == addr1 + 8).  */
5576
5577int
5578addrs_ok_for_quad_peep (addr1, addr2)
5579     rtx addr1;
5580     rtx addr2;
5581{
5582  unsigned int reg1;
5583  int offset1;
5584
5585  /* Extract an offset (if used) from the first addr.  */
5586  if (GET_CODE (addr1) == PLUS)
5587    {
5588      /* If not a REG, return zero.  */
5589      if (GET_CODE (XEXP (addr1, 0)) != REG)
5590	return 0;
5591      else
5592	{
5593          reg1 = REGNO (XEXP (addr1, 0));
5594	  /* The offset must be constant!  */
5595	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5596            return 0;
5597          offset1 = INTVAL (XEXP (addr1, 1));
5598	}
5599    }
5600  else if (GET_CODE (addr1) != REG)
5601    return 0;
5602  else
5603    {
5604      reg1 = REGNO (addr1);
5605      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
5606      offset1 = 0;
5607    }
5608
5609  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
5610  if (GET_CODE (addr2) != PLUS)
5611    return 0;
5612
5613  if (GET_CODE (XEXP (addr2, 0)) != REG
5614      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5615    return 0;
5616
5617  if (reg1 != REGNO (XEXP (addr2, 0)))
5618    return 0;
5619
5620  /* The offset for the second addr must be 8 more than the first addr.  */
5621  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5622    return 0;
5623
5624  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
5625     instructions.  */
5626  return 1;
5627}
5628
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.

   The tests below are ordered by priority: each one assumes all the
   earlier tests failed.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Determine the hard register number of IN, or -1 when it is not
     (known to be) a hard register: a constant, memory, or a pseudo
     with no hard register.  true_regnum presumably resolves a pseudo
     or SUBREG to its assigned hard register — note its result is
     re-checked against FIRST_PSEUDO_REGISTER.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
5701
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* OP must be a comparison rtx.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  /* The first operand of the comparison must be a condition register.  */
  reg = XEXP (op, 0);

  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field is four bits wide; BASE_BIT is the CCR bit number of
     this field's first bit (the cases below use offsets 0..3 for
     LT, GT, EQ and SO/UN respectively).  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* For scc, %D will have emitted a crnor placing the NE result in
	 the SO/UN slot (base_bit + 3).  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
5762
5763/* Return the GOT register.  */
5764
5765struct rtx_def *
5766rs6000_got_register (value)
5767     rtx value ATTRIBUTE_UNUSED;
5768{
5769  /* The second flow pass currently (June 1999) can't update
5770     regs_ever_live without disturbing other parts of the compiler, so
5771     update it here to make the prolog/epilogue code happy.  */
5772  if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5773    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5774
5775  current_function_uses_pic_offset_table = 1;
5776
5777  return pic_offset_table_rtx;
5778}
5779
5780/* Functions to init, mark and free struct machine_function.
5781   These will be called, via pointer variables,
5782   from push_function_context and pop_function_context.  */
5783
5784static void
5785rs6000_init_machine_status (p)
5786     struct function *p;
5787{
5788  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5789}
5790
5791static void
5792rs6000_free_machine_status (p)
5793     struct function *p;
5794{
5795  if (p->machine == NULL)
5796    return;
5797
5798  free (p->machine);
5799  p->machine = NULL;
5800}
5801
5802
5803/* Print an operand.  Recognize special options, documented below.  */
5804
5805#if TARGET_ELF
5806#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5807#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5808#else
5809#define SMALL_DATA_RELOC "sda21"
5810#define SMALL_DATA_REG 0
5811#endif
5812
/* Output operand X to FILE according to the single-letter format CODE
   (0 means no modifier).  This implements the PRINT_OPERAND macro;
   each case below documents its own format letter.  */

void
print_operand (file, x, code)
    FILE *file;
    rtx x;
    int code;
{
  int i;
  HOST_WIDE_INT val;

  /* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	   optional cror needed for certain floating-point
	   comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  /* NOTE(review): even after a lossage above, control falls
	     through and the pieces are printed anyway.  */
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, VOIDmode))
	output_operand_lossage ("invalid %%m value");

      val = INT_LOWPART (x);

      /* If the high bit is set and the low bit is not, the value is zero.
	 If the high bit is zero, the value is the first 1 bit we find from
	 the left.  */
      if ((val & 0x80000000) && ((val & 1) == 0))
	{
	  putc ('0', file);
	  return;
	}
      else if ((val & 0x80000000) == 0)
	{
	  for (i = 1; i < 32; i++)
	    if ((val <<= 1) & 0x80000000)
	      break;
	  fprintf (file, "%d", i);
	  return;
	}

      /* Otherwise, look for the first 0 bit from the right.  The result is its
	 number plus 1. We know the low-order bit is one.  */
      for (i = 0; i < 32; i++)
	if (((val >>= 1) & 1) == 0)
	  break;

      /* If we ended in ...01, i would be 0.  The correct value is 31, so
	 we want 31 - i.  */
      fprintf (file, "%d", 31 - i);
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, VOIDmode))
	output_operand_lossage ("invalid %%M value");

      val = INT_LOWPART (x);

      /* If the low bit is set and the high bit is not, the value is 31.
	 If the low bit is zero, the value is the first 1 bit we find from
	 the right.  */
      if ((val & 1) && ((val & 0x80000000) == 0))
	{
	  fputs ("31", file);
	  return;
	}
      else if ((val & 1) == 0)
	{
	  for (i = 0; i < 32; i++)
	    if ((val >>= 1) & 1)
	      break;

	  /* If we had ....10, i would be 0.  The result should be
	     30, so we need 30 - i.  */
	  fprintf (file, "%d", 30 - i);
	  return;
	}

      /* Otherwise, look for the first 0 bit from the left.  The result is its
	 number minus 1. We know the high-order bit is one.  */
      for (i = 0; i < 32; i++)
	if (((val <<= 1) & 0x80000000) == 0)
	  break;

      fprintf (file, "%d", i);
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
         logical operations, it will also treat the negated
         CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	/* NOTE(review): if CODE was none of AND/IOR/XOR, T is still a
	   null pointer here; should output_operand_lossage return, the
	   indexing below dereferences null — verify.  */
	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, VOIDmode))
	output_operand_lossage ("invalid %%S value");

      val = INT_LOWPART (x);

      if (val & 1)      /* Clear Left */
	{
	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	    if (!((val >>= 1) & 1))
	      break;

#if HOST_BITS_PER_WIDE_INT == 32
	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	    {
	      val = CONST_DOUBLE_HIGH (x);

	      if (val == 0)
		--i;
	      else
		for (i = 32; i < 64; i++)
		  if (!((val >>= 1) & 1))
		    break;
	    }
#endif
	/* i = index of last set bit from right
	   mask begins at 63 - i from left */
	  if (i > 63)
	    output_operand_lossage ("%%S computed all 1's mask");

	  fprintf (file, "%d", 63 - i);
	  return;
	}
      else	/* Clear Right */
	{
	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	    if ((val >>= 1) & 1)
	      break;

#if HOST_BITS_PER_WIDE_INT == 32
	if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	  {
	    val = CONST_DOUBLE_HIGH (x);

	    if (val == (HOST_WIDE_INT) -1)
	      --i;
	    else
	      for (i = 32; i < 64; i++)
		if ((val >>= 1) & 1)
		  break;
	  }
#endif
	/* i = index of last clear bit from right
	   mask ends at 62 - i from left */
	  if (i > 62)
	    output_operand_lossage ("%%S computed all 0's mask");

	  fprintf (file, "%d", 62 - i);
	  return;
	}

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      /* break, not return: nothing further is printed after the
	 trap code.  */
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	/* break, not return: nothing follows the switch.  */
	break;
      }

    case 0:
      /* No modifier: print the operand itself.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
6503
6504/* Print the address of an operand.  */
6505
void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  /* Write the assembler syntax for memory address X to FILE.  X may be
     a bare register, a symbolic address (with small-data relocation
     where applicable), reg+reg, reg+constant, a LO_SUM (ELF/Mach-O
     low-part form), or a TOC-relative constant-pool reference.  */
  if (GET_CODE (x) == REG)
    /* Bare register: address is "0(reg)".  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data objects are addressed off the small-data base
	   register with a special relocation.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* A bare symbolic address is never legitimate on TOC targets.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed (reg+reg) form: print "ra,rb".  If the first register
	 is number 0, swap the operands -- presumably because r0 in the
	 base position reads as literal zero (NOTE(review): confirm
	 against the ISA / md constraints).  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Register plus displacement: "d(reg)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part form: "sym@l(reg)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O low-part form: "lo16(sym)(reg)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, additionally append "@toc" to the symbol's name
		 for the duration of the call; the original name is put
		 back below, so the alloca'd buffer never escapes.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary surgery on X.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6593
6594/* Target hook for assembling integer objects.  The powerpc version has
6595   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6596   is defined.  It also needs to handle DI-mode objects on 64-bit
6597   targets.  */
6598
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Re-entry guard; presumably protects against this function being
	 entered again while emitting the fixup -- TODO confirm.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit the word itself under a local label, tagged @fixup so
	     it is relocated at load time.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* Record the label's address in the .fixup section, then
	     switch back to the previously selected section.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else takes the target-independent path.  */
  return default_assemble_integer (x, size, aligned_p);
}
6657
6658enum rtx_code
6659rs6000_reverse_condition (mode, code)
6660     enum machine_mode mode;
6661     enum rtx_code code;
6662{
6663  /* Reversal of FP compares takes care -- an ordered compare
6664     becomes an unordered compare and vice versa.  */
6665  if (mode == CCFPmode)
6666    return reverse_condition_maybe_unordered (code);
6667  else
6668    return reverse_condition (code);
6669}
6670
6671/* Generate a compare for CODE.  Return a brand-new rtx that
6672   represents the result of the compare.  */
6673
static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Choose the condition-register mode: CCFPmode for floating point,
     CCUNSmode for unsigned integer comparisons, plain CCmode otherwise.
     Operands come from the rs6000_compare_op0/op1 globals.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	  || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);
  emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			  gen_rtx_COMPARE (comp_mode,
					   rs6000_compare_op0,
					   rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      /* Decompose the compound condition into two primitive tests of
	 the same CR field, OR the two condition bits together, and
	 let the caller test the combined bit for equality.  */
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  abort ();
	}
      /* Sanity-check that each primitive test is legal in the mode of
	 the compare just emitted.  */
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* From here on the caller tests the ORed bit with EQ.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  /* Return CODE applied to the (possibly combined) comparison result,
     suitable for use in a branch or store-condition pattern.  */
  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
6735
6736
6737/* Emit the RTL for an sCOND pattern.  */
6738
6739void
6740rs6000_emit_sCOND (code, result)
6741     enum rtx_code code;
6742     rtx result;
6743{
6744  rtx condition_rtx;
6745  enum machine_mode op_mode;
6746
6747  condition_rtx = rs6000_generate_compare (code);
6748
6749  op_mode = GET_MODE (rs6000_compare_op0);
6750  if (op_mode == VOIDmode)
6751    op_mode = GET_MODE (rs6000_compare_op1);
6752
6753  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6754    {
6755      PUT_MODE (condition_rtx, DImode);
6756      convert_move (result, condition_rtx, 0);
6757    }
6758  else
6759    {
6760      PUT_MODE (condition_rtx, SImode);
6761      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6762    }
6763}
6764
6765/* Emit a branch of kind CODE to location LOC.  */
6766
6767void
6768rs6000_emit_cbranch (code, loc)
6769     enum rtx_code code;
6770     rtx loc;
6771{
6772  rtx condition_rtx, loc_ref;
6773
6774  condition_rtx = rs6000_generate_compare (code);
6775  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6776  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6777			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6778						     loc_ref, pc_rtx)));
6779}
6780
6781/* Return the string to output a conditional branch to LABEL, which is
6782   the operand number of the label, or -1 if the branch is really a
6783   conditional return.
6784
6785   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
6786   condition code register and its mode specifies what kind of
6787   comparison we made.
6788
6789   REVERSED is non-zero if we should reverse the sense of the comparison.
6790
6791   INSN is the insn.  */
6792
char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE: the returned string lives in this static buffer and is only
     valid until the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the target is beyond conditional-branch range, so
     we must invert the condition and branch around an unconditional
     jump (emitted at the bottom).  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  /* Map the rtx comparison code onto the branch mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* For branches that are very close to 50%, assume not-taken.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* LABEL == NULL means a conditional return.  NOTE(review): the
     "{a|b}" construct appears to select between two mnemonic dialects
     in the emitted assembler -- confirm against the output machinery.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
6885
6886/* Emit a conditional move: move TRUE_COND to DEST if OP of the
6887   operands of the last comparison is nonzero/true, FALSE_COND if it
6888   is zero/false.  Return 0 if the hardware has no such operation.  */
6889
6890int
6891rs6000_emit_cmove (dest, op, true_cond, false_cond)
6892     rtx dest;
6893     rtx op;
6894     rtx true_cond;
6895     rtx false_cond;
6896{
6897  enum rtx_code code = GET_CODE (op);
6898  rtx op0 = rs6000_compare_op0;
6899  rtx op1 = rs6000_compare_op1;
6900  REAL_VALUE_TYPE c1;
6901  enum machine_mode mode = GET_MODE (op0);
6902  rtx temp;
6903
6904  /* First, work out if the hardware can do this at all, or
6905     if it's too slow...  */
6906  /* If the comparison is an integer one, since we only have fsel
6907     it'll be cheaper to use a branch.  */
6908  if (! rs6000_compare_fp_p)
6909    return 0;
6910
6911  /* Eliminate half of the comparisons by switching operands, this
6912     makes the remaining code simpler.  */
6913  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
6914      || code == LTGT || code == LT)
6915    {
6916      code = reverse_condition_maybe_unordered (code);
6917      temp = true_cond;
6918      true_cond = false_cond;
6919      false_cond = temp;
6920    }
6921
6922  /* UNEQ and LTGT take four instructions for a comparison with zero,
6923     it'll probably be faster to use a branch here too.  */
6924  if (code == UNEQ)
6925    return 0;
6926
6927  if (GET_CODE (op1) == CONST_DOUBLE)
6928    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
6929
6930  /* We're going to try to implement comparions by performing
6931     a subtract, then comparing against zero.  Unfortunately,
6932     Inf - Inf is NaN which is not zero, and so if we don't
6933     know that the the operand is finite and the comparison
6934     would treat EQ different to UNORDERED, we can't do it.  */
6935  if (! flag_unsafe_math_optimizations
6936      && code != GT && code != UNGE
6937      && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
6938      /* Constructs of the form (a OP b ? a : b) are safe.  */
6939      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
6940	  || (! rtx_equal_p (op0, true_cond)
6941	      && ! rtx_equal_p (op1, true_cond))))
6942    return 0;
6943  /* At this point we know we can use fsel.  */
6944
6945  /* Reduce the comparison to a comparison against zero.  */
6946  temp = gen_reg_rtx (mode);
6947  emit_insn (gen_rtx_SET (VOIDmode, temp,
6948			  gen_rtx_MINUS (mode, op0, op1)));
6949  op0 = temp;
6950  op1 = CONST0_RTX (mode);
6951
6952  /* If we don't care about NaNs we can reduce some of the comparisons
6953     down to faster ones.  */
6954  if (flag_unsafe_math_optimizations)
6955    switch (code)
6956      {
6957      case GT:
6958	code = LE;
6959	temp = true_cond;
6960	true_cond = false_cond;
6961	false_cond = temp;
6962	break;
6963      case UNGE:
6964	code = GE;
6965	break;
6966      case UNEQ:
6967	code = EQ;
6968	break;
6969      default:
6970	break;
6971      }
6972
6973  /* Now, reduce everything down to a GE.  */
6974  switch (code)
6975    {
6976    case GE:
6977      break;
6978
6979    case LE:
6980      temp = gen_reg_rtx (mode);
6981      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6982      op0 = temp;
6983      break;
6984
6985    case ORDERED:
6986      temp = gen_reg_rtx (mode);
6987      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
6988      op0 = temp;
6989      break;
6990
6991    case EQ:
6992      temp = gen_reg_rtx (mode);
6993      emit_insn (gen_rtx_SET (VOIDmode, temp,
6994			      gen_rtx_NEG (mode,
6995					   gen_rtx_ABS (mode, op0))));
6996      op0 = temp;
6997      break;
6998
6999    case UNGE:
7000      temp = gen_reg_rtx (mode);
7001      emit_insn (gen_rtx_SET (VOIDmode, temp,
7002			      gen_rtx_IF_THEN_ELSE (mode,
7003						    gen_rtx_GE (VOIDmode,
7004								op0, op1),
7005						    true_cond, false_cond)));
7006      false_cond = temp;
7007      true_cond = false_cond;
7008
7009      temp = gen_reg_rtx (mode);
7010      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7011      op0 = temp;
7012      break;
7013
7014    case GT:
7015      temp = gen_reg_rtx (mode);
7016      emit_insn (gen_rtx_SET (VOIDmode, temp,
7017			      gen_rtx_IF_THEN_ELSE (mode,
7018						    gen_rtx_GE (VOIDmode,
7019								op0, op1),
7020						    true_cond, false_cond)));
7021      true_cond = temp;
7022      false_cond = true_cond;
7023
7024      temp = gen_reg_rtx (mode);
7025      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7026      op0 = temp;
7027      break;
7028
7029    default:
7030      abort ();
7031    }
7032
7033  emit_insn (gen_rtx_SET (VOIDmode, dest,
7034			  gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7035						gen_rtx_GE (VOIDmode,
7036							    op0, op1),
7037						true_cond, false_cond)));
7038  return 1;
7039}
7040
7041void
7042rs6000_emit_minmax (dest, code, op0, op1)
7043     rtx dest;
7044     enum rtx_code code;
7045     rtx op0;
7046     rtx op1;
7047{
7048  enum machine_mode mode = GET_MODE (op0);
7049  rtx target;
7050  if (code == SMAX || code == UMAX)
7051    target = emit_conditional_move (dest, GE, op0, op1, mode,
7052				    op0, op1, mode, 0);
7053  else
7054    target = emit_conditional_move (dest, GE, op0, op1, mode,
7055				    op1, op0, mode, 0);
7056  if (target == NULL_RTX)
7057    abort ();
7058  if (target != dest)
7059    emit_move_insn (dest, target);
7060}
7061
7062/* This page contains routines that are used to determine what the
7063   function prologue and epilogue code will do and write them out.  */
7064
7065/* Return the first fixed-point register that is required to be
7066   saved. 32 if none.  */
7067
int
first_reg_to_save ()
{
  int first_reg;

  /* Find lowest numbered live register.  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (regs_ever_live[first_reg]
	&& (! call_used_regs[first_reg]
	    /* The PIC register is formally call-used but must still be
	       saved when it is live and PIC code is being generated.  */
	    || (first_reg == PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
      break;

  if (current_function_profile)
    {
      /* AIX must save/restore every register that contains a parameter
	 before/after the .__mcount call plus an additional register
	 for the static chain, if needed; use registers from 30 down to 22
	 to do this.  */
      if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
	{
	  int last_parm_reg, profile_first_reg;

	  /* Figure out last used parameter register.  The proper thing
	     to do is to walk incoming args of the function.  A function
	     might have live parameter registers even if it has no
	     incoming args.  */
	  for (last_parm_reg = 10;
	       last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
	       last_parm_reg--)
	    ;

	  /* Calculate first reg for saving parameter registers
	     and static chain.
	     Skip reg 31 which may contain the frame pointer.  */
	  profile_first_reg = (33 - last_parm_reg
			       - (current_function_needs_context ? 1 : 0));
#if TARGET_MACHO
          /* Need to skip another reg to account for R31 being PICBASE
             (when flag_pic is set) or R30 being used as the frame
             pointer (when flag_pic is not set).  */
          --profile_first_reg;
#endif
	  /* Do not save frame pointer if no parameters needs to be saved.  */
	  if (profile_first_reg == 31)
	    profile_first_reg = 32;

	  if (first_reg > profile_first_reg)
	    first_reg = profile_first_reg;
	}

      /* SVR4 may need one register to preserve the static chain.  */
      else if (current_function_needs_context)
	{
	  /* Skip reg 31 which may contain the frame pointer.  */
	  if (first_reg > 30)
	    first_reg = 30;
	}
    }

#if TARGET_MACHO
  /* If the PIC register is live, extend the save range down to it.  */
  if (flag_pic && current_function_uses_pic_offset_table &&
      (first_reg > PIC_OFFSET_TABLE_REGNUM))
    return PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
7137
7138/* Similar, for FP regs.  */
7139
7140int
7141first_fp_reg_to_save ()
7142{
7143  int first_reg;
7144
7145  /* Find lowest numbered live register.  */
7146  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7147    if (regs_ever_live[first_reg])
7148      break;
7149
7150  return first_reg;
7151}
7152
7153/* Similar, for AltiVec regs.  */
7154
7155static int
7156first_altivec_reg_to_save ()
7157{
7158  int i;
7159
7160  /* Stack frame remains as is unless we are in AltiVec ABI.  */
7161  if (! TARGET_ALTIVEC_ABI)
7162    return LAST_ALTIVEC_REGNO + 1;
7163
7164  /* Find lowest numbered live register.  */
7165  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7166    if (regs_ever_live[i])
7167      break;
7168
7169  return i;
7170}
7171
7172/* Return a 32-bit mask of the AltiVec registers we need to set in
7173   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
7174   the 32-bit word is 0.  */
7175
static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): assumes cfun->args_info.vregno marks the highest
     vector argument register in use -- confirm against the
     CUMULATIVE_ARGS bookkeeping for this target.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
7220
7221static void
7222is_altivec_return_reg (reg, xyes)
7223     rtx reg;
7224     void *xyes;
7225{
7226  bool *yes = (bool *) xyes;
7227  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7228    *yes = true;
7229}
7230
7231
7232/* Calculate the stack information for the current function.  This is
7233   complicated by having two separate calling sequences, the AIX calling
7234   sequence and the V.4 calling sequence.
7235
7236   AIX (and Darwin/Mac OS X) stack frames look like:
7237							  32-bit  64-bit
7238	SP---->	+---------------------------------------+
7239		| back chain to caller			| 0	  0
7240		+---------------------------------------+
7241		| saved CR				| 4       8 (8-11)
7242		+---------------------------------------+
7243		| saved LR				| 8       16
7244		+---------------------------------------+
7245		| reserved for compilers		| 12      24
7246		+---------------------------------------+
7247		| reserved for binders			| 16      32
7248		+---------------------------------------+
7249		| saved TOC pointer			| 20      40
7250		+---------------------------------------+
7251		| Parameter save area (P)		| 24      48
7252		+---------------------------------------+
7253		| Alloca space (A)			| 24+P    etc.
7254		+---------------------------------------+
7255		| Local variable space (L)		| 24+P+A
7256		+---------------------------------------+
7257		| Float/int conversion temporary (X)	| 24+P+A+L
7258		+---------------------------------------+
7259		| Save area for AltiVec registers (W)	| 24+P+A+L+X
7260		+---------------------------------------+
7261		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
7262		+---------------------------------------+
7263		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
7264		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
7268		+---------------------------------------+
7269	old SP->| back chain to caller's caller		|
7270		+---------------------------------------+
7271
7272   The required alignment for AIX configurations is two words (i.e., 8
7273   or 16 bytes).
7274
7275
7276   V.4 stack frames look like:
7277
7278	SP---->	+---------------------------------------+
7279		| back chain to caller			| 0
7280		+---------------------------------------+
7281		| caller's saved LR			| 4
7282		+---------------------------------------+
7283		| Parameter save area (P)		| 8
7284		+---------------------------------------+
7285		| Alloca space (A)			| 8+P
7286		+---------------------------------------+
7287		| Varargs save area (V)			| 8+P+A
7288		+---------------------------------------+
7289		| Local variable space (L)		| 8+P+A+V
7290		+---------------------------------------+
7291		| Float/int conversion temporary (X)	| 8+P+A+V+L
7292		+---------------------------------------+
7293		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
7294		+---------------------------------------+
7295		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
7296		+---------------------------------------+
7297		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
7298		+---------------------------------------+
7299		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
7300		+---------------------------------------+
7301		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
7302		+---------------------------------------+
7303		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
7304		+---------------------------------------+
7305	old SP->| back chain to caller's caller		|
7306		+---------------------------------------+
7307
7308   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7309   given.  (But note below and in sysv4.h that we require only 8 and
7310   may round up the size of our stack frame anyways.  The historical
7311   reason is early versions of powerpc-linux which didn't properly
7312   align the stack at program startup.  A happy side-effect is that
7313   -mno-eabi libraries can be used with -meabi programs.)
7314
7315   The EABI configuration defaults to the V.4 layout, unless
7316   -mcall-aix is used, in which case the AIX layout is used.  However,
7317   the stack alignment requirements may differ.  If -mno-eabi is not
7318   given, the required stack alignment is 8 bytes; if -mno-eabi is
7319   given, the required alignment is 16 bytes.  (But see V.4 comment
7320   above.)  */
7321
7322#ifndef ABI_STACK_BOUNDARY
7323#define ABI_STACK_BOUNDARY STACK_BOUNDARY
7324#endif
7325
7326rs6000_stack_t *
7327rs6000_stack_info ()
7328{
7329  static rs6000_stack_t info, zero_info;
7330  rs6000_stack_t *info_ptr = &info;
7331  int reg_size = TARGET_POWERPC64 ? 8 : 4;
7332  enum rs6000_abi abi;
7333  int ehrd_size;
7334  int total_raw_size;
7335
7336  /* Zero all fields portably.  */
7337  info = zero_info;
7338
7339  /* Select which calling sequence.  */
7340  info_ptr->abi = abi = DEFAULT_ABI;
7341
7342  /* Calculate which registers need to be saved & save area size.  */
7343  info_ptr->first_gp_reg_save = first_reg_to_save ();
7344  /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7345     even if it currently looks like we won't.  */
7346  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7347       || (flag_pic == 1 && abi == ABI_V4)
7348       || (flag_pic && abi == ABI_DARWIN))
7349      && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7350    info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7351  else
7352    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7353
7354  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7355  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7356
7357  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7358  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7359				 - info_ptr->first_altivec_reg_save);
7360
7361  /* Does this function call anything?  */
7362  info_ptr->calls_p = (! current_function_is_leaf
7363		       || cfun->machine->ra_needs_full_frame);
7364
7365  /* Determine if we need to save the link register.  */
7366  if (rs6000_ra_ever_killed ()
7367      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7368#ifdef TARGET_RELOCATABLE
7369      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7370#endif
7371      || (info_ptr->first_fp_reg_save != 64
7372	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7373      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7374      || (abi == ABI_V4 && current_function_calls_alloca)
7375      || (DEFAULT_ABI == ABI_DARWIN
7376	  && flag_pic
7377	  && current_function_uses_pic_offset_table)
7378      || info_ptr->calls_p)
7379    {
7380      info_ptr->lr_save_p = 1;
7381      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7382    }
7383
7384  /* Determine if we need to save the condition code registers.  */
7385  if (regs_ever_live[CR2_REGNO]
7386      || regs_ever_live[CR3_REGNO]
7387      || regs_ever_live[CR4_REGNO])
7388    {
7389      info_ptr->cr_save_p = 1;
7390      if (abi == ABI_V4)
7391	info_ptr->cr_size = reg_size;
7392    }
7393
7394  /* If the current function calls __builtin_eh_return, then we need
7395     to allocate stack space for registers that will hold data for
7396     the exception handler.  */
7397  if (current_function_calls_eh_return)
7398    {
7399      unsigned int i;
7400      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7401	continue;
7402      ehrd_size = i * UNITS_PER_WORD;
7403    }
7404  else
7405    ehrd_size = 0;
7406
7407  /* Determine various sizes.  */
7408  info_ptr->reg_size     = reg_size;
7409  info_ptr->fixed_size   = RS6000_SAVE_AREA;
7410  info_ptr->varargs_size = RS6000_VARARGS_AREA;
7411  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
7412  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
7413					 8);
7414
7415  if (TARGET_ALTIVEC_ABI)
7416    {
7417      info_ptr->vrsave_mask = compute_vrsave_mask ();
7418      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
7419    }
7420  else
7421    {
7422      info_ptr->vrsave_mask = 0;
7423      info_ptr->vrsave_size = 0;
7424    }
7425
7426  /* Calculate the offsets.  */
7427  switch (abi)
7428    {
7429    case ABI_NONE:
7430    default:
7431      abort ();
7432
7433    case ABI_AIX:
7434    case ABI_AIX_NODESC:
7435    case ABI_DARWIN:
7436      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7437      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7438
7439      if (TARGET_ALTIVEC_ABI)
7440	{
7441	  info_ptr->vrsave_save_offset
7442	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7443
7444	  /* Align stack so vector save area is on a quadword boundary.  */
7445	  if (info_ptr->altivec_size != 0)
7446	    info_ptr->altivec_padding_size
7447	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7448	  else
7449	    info_ptr->altivec_padding_size = 0;
7450
7451	  info_ptr->altivec_save_offset
7452	    = info_ptr->vrsave_save_offset
7453	    - info_ptr->altivec_padding_size
7454	    - info_ptr->altivec_size;
7455
7456	  /* Adjust for AltiVec case.  */
7457	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7458	}
7459      else
7460	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
7461      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
7462      info_ptr->lr_save_offset   = 2*reg_size;
7463      break;
7464
7465    case ABI_V4:
7466      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7467      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7468      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
7469
7470      if (TARGET_ALTIVEC_ABI)
7471	{
7472	  info_ptr->vrsave_save_offset
7473	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7474
7475	  /* Align stack so vector save area is on a quadword boundary.  */
7476	  if (info_ptr->altivec_size != 0)
7477	    info_ptr->altivec_padding_size
7478	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7479	  else
7480	    info_ptr->altivec_padding_size = 0;
7481
7482	  info_ptr->altivec_save_offset
7483	    = info_ptr->vrsave_save_offset
7484	    - info_ptr->altivec_padding_size
7485	    - info_ptr->altivec_size;
7486
7487	  /* Adjust for AltiVec case.  */
7488	  info_ptr->toc_save_offset
7489	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
7490	}
7491      else
7492	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
7493      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
7494      info_ptr->lr_save_offset   = reg_size;
7495      break;
7496    }
7497
7498  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
7499					 + info_ptr->gp_size
7500					 + info_ptr->altivec_size
7501					 + info_ptr->altivec_padding_size
7502					 + info_ptr->vrsave_size
7503					 + ehrd_size
7504					 + info_ptr->cr_size
7505					 + info_ptr->lr_size
7506					 + info_ptr->vrsave_size
7507					 + info_ptr->toc_size,
7508					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7509					 ? 16 : 8);
7510
7511  total_raw_size	 = (info_ptr->vars_size
7512			    + info_ptr->parm_size
7513			    + info_ptr->save_size
7514			    + info_ptr->varargs_size
7515			    + info_ptr->fixed_size);
7516
7517  info_ptr->total_size =
7518    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7519
7520  /* Determine if we need to allocate any stack frame:
7521
7522     For AIX we need to push the stack if a frame pointer is needed
7523     (because the stack might be dynamically adjusted), if we are
7524     debugging, if we make calls, or if the sum of fp_save, gp_save,
7525     and local variables are more than the space needed to save all
7526     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7527     + 18*8 = 288 (GPR13 reserved).
7528
7529     For V.4 we don't have the stack cushion that AIX uses, but assume
7530     that the debugger can handle stackless frames.  */
7531
7532  if (info_ptr->calls_p)
7533    info_ptr->push_p = 1;
7534
7535  else if (abi == ABI_V4)
7536    info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
7537			|| info_ptr->calls_p);
7538
7539  else
7540    info_ptr->push_p = (frame_pointer_needed
7541			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7542			|| ((total_raw_size - info_ptr->fixed_size)
7543			    > (TARGET_32BIT ? 220 : 288)));
7544
7545  /* Zero offsets if we're not saving those registers.  */
7546  if (info_ptr->fp_size == 0)
7547    info_ptr->fp_save_offset = 0;
7548
7549  if (info_ptr->gp_size == 0)
7550    info_ptr->gp_save_offset = 0;
7551
7552  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7553    info_ptr->altivec_save_offset = 0;
7554
7555  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7556    info_ptr->vrsave_save_offset = 0;
7557
7558  if (! info_ptr->lr_save_p)
7559    info_ptr->lr_save_offset = 0;
7560
7561  if (! info_ptr->cr_save_p)
7562    info_ptr->cr_save_offset = 0;
7563
7564  if (! info_ptr->toc_save_p)
7565    info_ptr->toc_save_offset = 0;
7566
7567  return info_ptr;
7568}
7569
7570void
7571debug_stack_info (info)
7572     rs6000_stack_t *info;
7573{
7574  const char *abi_string;
7575
7576  if (! info)
7577    info = rs6000_stack_info ();
7578
7579  fprintf (stderr, "\nStack information for function %s:\n",
7580	   ((current_function_decl && DECL_NAME (current_function_decl))
7581	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7582	    : "<unknown>"));
7583
7584  switch (info->abi)
7585    {
7586    default:		 abi_string = "Unknown";	break;
7587    case ABI_NONE:	 abi_string = "NONE";		break;
7588    case ABI_AIX:
7589    case ABI_AIX_NODESC: abi_string = "AIX";		break;
7590    case ABI_DARWIN:	 abi_string = "Darwin";		break;
7591    case ABI_V4:	 abi_string = "V.4";		break;
7592    }
7593
7594  fprintf (stderr, "\tABI                 = %5s\n", abi_string);
7595
7596  if (TARGET_ALTIVEC_ABI)
7597    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7598
7599  if (info->first_gp_reg_save != 32)
7600    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);
7601
7602  if (info->first_fp_reg_save != 64)
7603    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);
7604
7605  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7606    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7607	     info->first_altivec_reg_save);
7608
7609  if (info->lr_save_p)
7610    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);
7611
7612  if (info->cr_save_p)
7613    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);
7614
7615  if (info->toc_save_p)
7616    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);
7617
7618  if (info->vrsave_mask)
7619    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);
7620
7621  if (info->push_p)
7622    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);
7623
7624  if (info->calls_p)
7625    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);
7626
7627  if (info->gp_save_offset)
7628    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);
7629
7630  if (info->fp_save_offset)
7631    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);
7632
7633  if (info->altivec_save_offset)
7634    fprintf (stderr, "\taltivec_save_offset = %5d\n",
7635	     info->altivec_save_offset);
7636
7637  if (info->vrsave_save_offset)
7638    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
7639	     info->vrsave_save_offset);
7640
7641  if (info->lr_save_offset)
7642    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);
7643
7644  if (info->cr_save_offset)
7645    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);
7646
7647  if (info->toc_save_offset)
7648    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);
7649
7650  if (info->varargs_save_offset)
7651    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7652
7653  if (info->total_size)
7654    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);
7655
7656  if (info->varargs_size)
7657    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);
7658
7659  if (info->vars_size)
7660    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);
7661
7662  if (info->parm_size)
7663    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);
7664
7665  if (info->fixed_size)
7666    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);
7667
7668  if (info->gp_size)
7669    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);
7670
7671  if (info->fp_size)
7672    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);
7673
7674  if (info->altivec_size)
7675    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);
7676
7677  if (info->vrsave_size)
7678    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);
7679
7680  if (info->altivec_padding_size)
7681    fprintf (stderr, "\taltivec_padding_size= %5d\n",
7682	     info->altivec_padding_size);
7683
7684  if (info->lr_size)
7685    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);
7686
7687  if (info->cr_size)
7688    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);
7689
7690  if (info->toc_size)
7691    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);
7692
7693  if (info->save_size)
7694    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);
7695
7696  if (info->reg_size != 4)
7697    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);
7698
7699  fprintf (stderr, "\n");
7700}
7701
7702rtx
7703rs6000_return_addr (count, frame)
7704     int count;
7705     rtx frame;
7706{
7707  /* Currently we don't optimize very well between prolog and body
7708     code and for PIC code the code can be actually quite bad, so
7709     don't try to be too clever here.  */
7710  if (count != 0
7711      || flag_pic != 0
7712      || DEFAULT_ABI == ABI_AIX
7713      || DEFAULT_ABI == ABI_AIX_NODESC)
7714    {
7715      cfun->machine->ra_needs_full_frame = 1;
7716
7717      return
7718	gen_rtx_MEM
7719	  (Pmode,
7720	   memory_address
7721	   (Pmode,
7722	    plus_constant (copy_to_reg
7723			   (gen_rtx_MEM (Pmode,
7724					 memory_address (Pmode, frame))),
7725			   RETURN_ADDRESS_OFFSET)));
7726    }
7727
7728  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7729}
7730
7731static int
7732rs6000_ra_ever_killed ()
7733{
7734  rtx top;
7735
7736#ifdef ASM_OUTPUT_MI_THUNK
7737  if (current_function_is_thunk)
7738    return 0;
7739#endif
7740  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7741      || cfun->machine->ra_needs_full_frame)
7742    return regs_ever_live[LINK_REGISTER_REGNUM];
7743
7744  push_topmost_sequence ();
7745  top = get_insns ();
7746  pop_topmost_sequence ();
7747
7748  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7749			    top, NULL_RTX);
7750}
7751
7752/* Add a REG_MAYBE_DEAD note to the insn.  */
7753static void
7754rs6000_maybe_dead (insn)
7755     rtx insn;
7756{
7757  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7758					const0_rtx,
7759					REG_NOTES (insn));
7760}
7761
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue; in
   that case the link register and r0 are used directly as scratch
   registers, otherwise fresh pseudos are allocated.  Every emitted
   insn is wrapped in rs6000_maybe_dead, which adds a REG_MAYBE_DEAD
   note to it.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
    {
      if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
	{
	  /* SVR4 small-model PIC: one load_toc_v4_pic_si insn
	     produces the table pointer.  */
	  rtx temp = (fromprolog
		      ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		      : gen_reg_rtx (Pmode));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
	  rs6000_maybe_dead (emit_move_insn (dest, temp));
	}
      else if (flag_pic == 2)
        {
	  /* Large-model PIC: compute the TOC pointer from the
	     difference between a label near the load and the TOC
	     base, added to the address obtained via the link
	     register.  */
	  char buf[30];
	  rtx tempLR = (fromprolog
			? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
			: gen_reg_rtx (Pmode));
	  rtx temp0 = (fromprolog
			? gen_rtx_REG (Pmode, 0)
			: gen_reg_rtx (Pmode));
	  rtx symF;

	  /* possibly create the toc section */
	  if (! toc_initialized)
	    {
	      toc_section ();
	      function_section (current_function_decl);
	    }

	  if (fromprolog)
	    {
	      /* Prologue variant: use the per-function LCF/LCL label
	         pair numbered by rs6000_pic_labelno.  */
	      rtx symL;

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	      symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
								   symF)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
								   symL,
								   symF)));
	    }
	  else
	    {
	      /* Non-prologue (e.g. reload-generated) variant: make a
	         fresh LCG label each time and load the TOC base
	         through a memory slot at that label.  */
	      rtx tocsym;
	      static int reload_toc_labelno = 0;

	      tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								    symF,
								    tocsym)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_move_insn (temp0,
						 gen_rtx_MEM (Pmode, dest)));
	    }
	  /* DEST = address of label + (TOC base - label).  */
	  rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
	}
      else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
        {
	  /* This is for AIX code running in non-PIC ELF.  The TOC
	     address is simply the absolute address of the LCTOC1
	     label, materialized with a high/low pair.  */
	  char buf[30];
	  rtx realsym;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
	  realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
	  rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
	}
      else
        abort ();
    }
  else
    {
      /* AIX (and AIX-style) TOC: a single target-specific pattern.  */
      if (TARGET_32BIT)
        rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
        rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
}
7859
/* Return the alias set used for TOC references; it is created lazily
   the first time it is needed.  */

int
get_TOC_alias_set ()
{
  static int set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}
7868
7869/* This retuns nonzero if the current function uses the TOC.  This is
7870   determined by the presence of (unspec ... 7), which is generated by
7871   the various load_toc_* patterns.  */
7872
7873int
7874uses_TOC ()
7875{
7876    rtx insn;
7877
7878    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7879      if (INSN_P (insn))
7880	{
7881	  rtx pat = PATTERN (insn);
7882	  int i;
7883
7884	  if (GET_CODE (pat) == PARALLEL)
7885	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7886	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
7887		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
7888		  return 1;
7889	}
7890    return 0;
7891}
7892
7893rtx
7894create_TOC_reference (symbol)
7895    rtx symbol;
7896{
7897  return gen_rtx_PLUS (Pmode,
7898	   gen_rtx_REG (Pmode, TOC_REGISTER),
7899	     gen_rtx_CONST (Pmode,
7900	       gen_rtx_MINUS (Pmode, symbol,
7901		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
7902}
7903
7904#if TARGET_AIX
7905/* __throw will restore its own return address to be the same as the
7906   return address of the function that the throw is being made to.
7907   This is unfortunate, because we want to check the original
7908   return address to see if we need to restore the TOC.
7909   So we have to squirrel it away here.
7910   This is used only in compiling __throw and __rethrow.
7911
7912   Most of this code should be removed by CSE.  */
7913static rtx insn_after_throw;
7914
7915/* This does the saving...  */
7916void
7917rs6000_aix_emit_builtin_unwind_init ()
7918{
7919  rtx mem;
7920  rtx stack_top = gen_reg_rtx (Pmode);
7921  rtx opcode_addr = gen_reg_rtx (Pmode);
7922
7923  insn_after_throw = gen_reg_rtx (SImode);
7924
7925  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7926  emit_move_insn (stack_top, mem);
7927
7928  mem = gen_rtx_MEM (Pmode,
7929		     gen_rtx_PLUS (Pmode, stack_top,
7930				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7931  emit_move_insn (opcode_addr, mem);
7932  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
7933}
7934
7935/* Emit insns to _restore_ the TOC register, at runtime (specifically
7936   in _eh.o).  Only used on AIX.
7937
7938   The idea is that on AIX, function calls look like this:
7939	bl  somefunction-trampoline
7940	lwz r2,20(sp)
7941
7942   and later,
7943	somefunction-trampoline:
7944	stw r2,20(sp)
7945	 ... load function address in the count register ...
7946	bctr
7947   or like this, if the linker determines that this is not a cross-module call
7948   and so the TOC need not be restored:
7949	bl  somefunction
7950	nop
7951   or like this, if the compiler could determine that this is not a
7952   cross-module call:
7953	bl  somefunction
7954   now, the tricky bit here is that register 2 is saved and restored
7955   by the _linker_, so we can't readily generate debugging information
7956   for it.  So we need to go back up the call chain looking at the
7957   insns at return addresses to see which calls saved the TOC register
7958   and so see where it gets restored from.
7959
7960   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
7961   just before the actual epilogue.
7962
7963   On the bright side, this incurs no space or time overhead unless an
7964   exception is thrown, except for the extra code in libgcc.a.
7965
7966   The parameter STACKSIZE is a register containing (at runtime)
7967   the amount to be popped off the stack in addition to the stack frame
7968   of this routine (which will be __throw or __rethrow, and so is
7969   guaranteed to have a stack frame).  */
7970
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own frame's back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk ends once STACKSIZE bytes of frames have been covered.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The encoding of "lwz r2,20(r1)" (32-bit) resp. "ld r2,40(r1)"
     (64-bit): the TOC-restore insn the linker places after a
     cross-module call, as described in the comment above.  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* OPCODE starts out as the instruction recorded by
     rs6000_aix_emit_builtin_unwind_init.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at this level's return address is not the TOC
     restore, skip the reload of r2.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload the TOC (r2) from its save slot, 5 pointers into the
     frame (20 bytes on 32-bit, 40 on 64-bit).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);

  /* Stop when the walk reaches the precomputed top frame.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Follow the back chain up one frame...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the instruction at that frame's return address
     (the slot 2 pointers in -- presumably the saved LR).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8034#endif /* TARGET_AIX */
8035
8036/* This ties together stack memory (MEM with an alias set of
8037   rs6000_sr_alias_set) and the change to the stack pointer.  */
8038
8039static void
8040rs6000_emit_stack_tie ()
8041{
8042  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8043
8044  set_mem_alias_set (mem, rs6000_sr_alias_set);
8045  emit_insn (gen_stack_tie (mem));
8046}
8047
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   The allocation is recorded with a REG_FRAME_RELATED_EXPR note so
   the unwinder sees a simple "sp = sp - SIZE".  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      /* -fstack-limit-register: compute limit+SIZE into r0 and trap
	 if the current stack pointer is below it.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)))
;
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* -fstack-limit-symbol: only supported for 32-bit SVR4, where
	 the constant limit+SIZE can be built with a high/low pair.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update, r12 always gets the old sp so the
     back chain can be stored below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* A decrement too large for a 16-bit immediate must go through
	 a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update: decrement sp and store the old sp (the
	 back chain) at the new sp in one insn.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement sp, then store the saved copy
	 from r12 as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the net effect for the unwinder: sp = sp + (-SIZE).  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8137
8138/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8139   knows that:
8140
8141     (mem (plus (blah) (regXX)))
8142
8143   is really:
8144
8145     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
8146
8147static void
8148altivec_frame_fixup (insn, reg, val)
8149     rtx insn, reg;
8150     HOST_WIDE_INT val;
8151{
8152  rtx real;
8153
8154  real = copy_rtx (PATTERN (insn));
8155
8156  real = replace_rtx (real, reg, GEN_INT (val));
8157
8158  RTX_FRAME_RELATED_P (insn) = 1;
8159  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8160					real,
8161					REG_NOTES (insn));
8162}
8163
8164/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8165   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8166   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
8167   deduce these equivalences by itself so it wasn't necessary to hold
8168   its hand so much.  */
8169
8170static void
8171rs6000_frame_related (insn, reg, val, reg2, rreg)
8172     rtx insn;
8173     rtx reg;
8174     HOST_WIDE_INT val;
8175     rtx reg2;
8176     rtx rreg;
8177{
8178  rtx real, temp;
8179
8180  real = copy_rtx (PATTERN (insn));
8181
8182  real = replace_rtx (real, reg,
8183		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8184							STACK_POINTER_REGNUM),
8185				    GEN_INT (val)));
8186
8187  /* We expect that 'real' is either a SET or a PARALLEL containing
8188     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
8189     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
8190
8191  if (GET_CODE (real) == SET)
8192    {
8193      rtx set = real;
8194
8195      temp = simplify_rtx (SET_SRC (set));
8196      if (temp)
8197	SET_SRC (set) = temp;
8198      temp = simplify_rtx (SET_DEST (set));
8199      if (temp)
8200	SET_DEST (set) = temp;
8201      if (GET_CODE (SET_DEST (set)) == MEM)
8202	{
8203	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8204	  if (temp)
8205	    XEXP (SET_DEST (set), 0) = temp;
8206	}
8207    }
8208  else if (GET_CODE (real) == PARALLEL)
8209    {
8210      int i;
8211      for (i = 0; i < XVECLEN (real, 0); i++)
8212	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8213	  {
8214	    rtx set = XVECEXP (real, 0, i);
8215
8216	    temp = simplify_rtx (SET_SRC (set));
8217	    if (temp)
8218	      SET_SRC (set) = temp;
8219	    temp = simplify_rtx (SET_DEST (set));
8220	    if (temp)
8221	      SET_DEST (set) = temp;
8222	    if (GET_CODE (SET_DEST (set)) == MEM)
8223	      {
8224		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8225		if (temp)
8226		  XEXP (SET_DEST (set), 0) = temp;
8227	      }
8228	    RTX_FRAME_RELATED_P (set) = 1;
8229	  }
8230    }
8231  else
8232    abort ();
8233
8234  if (reg2 != NULL_RTX)
8235    real = replace_rtx (real, reg2, rreg);
8236
8237  RTX_FRAME_RELATED_P (insn) = 1;
8238  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8239					real,
8240					REG_NOTES (insn));
8241}
8242
8243/* Returns an insn that has a vrsave set operation with the
8244   appropriate CLOBBERs.  */
8245
8246static rtx
8247generate_set_vrsave (reg, info, epiloguep)
8248     rtx reg;
8249     rs6000_stack_t *info;
8250     int epiloguep;
8251{
8252  int nclobs, i;
8253  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8254  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8255
8256  clobs[0]
8257    = gen_rtx_SET (VOIDmode,
8258		   vrsave,
8259		   gen_rtx_UNSPEC_VOLATILE (SImode,
8260					    gen_rtvec (2, reg, vrsave),
8261					    30));
8262
8263  nclobs = 1;
8264
8265  /* We need to clobber the registers in the mask so the scheduler
8266     does not move sets to VRSAVE before sets of AltiVec registers.
8267
8268     However, if the function receives nonlocal gotos, reload will set
8269     all call saved registers live.  We will end up with:
8270
8271     	(set (reg 999) (mem))
8272	(parallel [ (set (reg vrsave) (unspec blah))
8273		    (clobber (reg 999))])
8274
8275     The clobber will cause the store into reg 999 to be dead, and
8276     flow will attempt to delete an epilogue insn.  In this case, we
8277     need an unspec use/set of the register.  */
8278
8279  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8280    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8281      {
8282	if (!epiloguep || call_used_regs [i])
8283	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8284					     gen_rtx_REG (V4SImode, i));
8285	else
8286	  {
8287	    rtx reg = gen_rtx_REG (V4SImode, i);
8288
8289	    clobs[nclobs++]
8290	      = gen_rtx_SET (VOIDmode,
8291			     reg,
8292			     gen_rtx_UNSPEC (V4SImode,
8293					     gen_rtvec (1, reg), 27));
8294	  }
8295      }
8296
8297  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8298
8299  for (i = 0; i < nclobs; ++i)
8300    XVECEXP (insn, 0, i) = clobs[i];
8301
8302  return insn;
8303}
8304
8305/* Emit function prologue as insns.  */
8306
8307void
8308rs6000_emit_prologue ()
8309{
8310  rs6000_stack_t *info = rs6000_stack_info ();
8311  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8312  int reg_size = TARGET_POWERPC64 ? 8 : 4;
8313  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8314  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8315  rtx frame_reg_rtx = sp_reg_rtx;
8316  rtx cr_save_rtx = NULL;
8317  rtx insn;
8318  int saving_FPRs_inline;
8319  int using_store_multiple;
8320  HOST_WIDE_INT sp_offset = 0;
8321
8322  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8323			  && info->first_gp_reg_save < 31);
8324  saving_FPRs_inline = (info->first_fp_reg_save == 64
8325			|| FP_SAVE_INLINE (info->first_fp_reg_save));
8326
8327  /* For V.4, update stack before we do any saving and set back pointer.  */
8328  if (info->push_p && DEFAULT_ABI == ABI_V4)
8329    {
8330      if (info->total_size < 32767)
8331	sp_offset = info->total_size;
8332      else
8333	frame_reg_rtx = frame_ptr_rtx;
8334      rs6000_emit_allocate_stack (info->total_size,
8335				  (frame_reg_rtx != sp_reg_rtx
8336				   && (info->cr_save_p
8337				       || info->lr_save_p
8338				       || info->first_fp_reg_save < 64
8339				       || info->first_gp_reg_save < 32
8340				       )));
8341      if (frame_reg_rtx != sp_reg_rtx)
8342	rs6000_emit_stack_tie ();
8343    }
8344
8345  /* Save AltiVec registers if needed.  */
8346  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8347    {
8348      int i;
8349
8350      /* There should be a non inline version of this, for when we
8351	 are saving lots of vector registers.  */
8352      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8353	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8354	  {
8355	    rtx areg, savereg, mem;
8356	    int offset;
8357
8358	    offset = info->altivec_save_offset + sp_offset
8359	      + 16 * (i - info->first_altivec_reg_save);
8360
8361	    savereg = gen_rtx_REG (V4SImode, i);
8362
8363	    areg = gen_rtx_REG (Pmode, 0);
8364	    emit_move_insn (areg, GEN_INT (offset));
8365
8366	    /* AltiVec addressing mode is [reg+reg].  */
8367	    mem = gen_rtx_MEM (V4SImode,
8368			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8369
8370	    set_mem_alias_set (mem, rs6000_sr_alias_set);
8371
8372	    insn = emit_move_insn (mem, savereg);
8373
8374	    altivec_frame_fixup (insn, areg, offset);
8375	  }
8376    }
8377
8378  /* VRSAVE is a bit vector representing which AltiVec registers
8379     are used.  The OS uses this to determine which vector
8380     registers to save on a context switch.  We need to save
8381     VRSAVE on the stack frame, add whatever AltiVec registers we
8382     used in this function, and do the corresponding magic in the
8383     epilogue.  */
8384
8385  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8386    {
8387      rtx reg, mem, vrsave;
8388      int offset;
8389
8390      /* Get VRSAVE onto a GPR.  */
8391      reg = gen_rtx_REG (SImode, 12);
8392      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8393      emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8394
8395      /* Save VRSAVE.  */
8396      offset = info->vrsave_save_offset + sp_offset;
8397      mem
8398	= gen_rtx_MEM (SImode,
8399		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8400      set_mem_alias_set (mem, rs6000_sr_alias_set);
8401      insn = emit_move_insn (mem, reg);
8402
8403      /* Include the registers in the mask.  */
8404      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8405
8406      insn = emit_insn (generate_set_vrsave (reg, info, 0));
8407    }
8408
8409  /* If we use the link register, get it into r0.  */
8410  if (info->lr_save_p)
8411    emit_move_insn (gen_rtx_REG (Pmode, 0),
8412		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8413
8414  /* If we need to save CR, put it into r12.  */
8415  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8416    {
8417      cr_save_rtx = gen_rtx_REG (SImode, 12);
8418      emit_insn (gen_movesi_from_cr (cr_save_rtx));
8419    }
8420
8421  /* Do any required saving of fpr's.  If only one or two to save, do
8422     it ourselves.  Otherwise, call function.  */
8423  if (saving_FPRs_inline)
8424    {
8425      int i;
8426      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8427	if ((regs_ever_live[info->first_fp_reg_save+i]
8428	     && ! call_used_regs[info->first_fp_reg_save+i]))
8429	  {
8430	    rtx addr, reg, mem;
8431	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8432	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8433				 GEN_INT (info->fp_save_offset
8434					  + sp_offset
8435					  + 8 * i));
8436	    mem = gen_rtx_MEM (DFmode, addr);
8437	    set_mem_alias_set (mem, rs6000_sr_alias_set);
8438
8439	    insn = emit_move_insn (mem, reg);
8440	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8441				  NULL_RTX, NULL_RTX);
8442	  }
8443    }
8444  else if (info->first_fp_reg_save != 64)
8445    {
8446      int i;
8447      char rname[30];
8448      const char *alloc_rname;
8449      rtvec p;
8450      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8451
8452      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8453					  gen_rtx_REG (Pmode,
8454						       LINK_REGISTER_REGNUM));
8455      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8456	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8457      alloc_rname = ggc_strdup (rname);
8458      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8459				      gen_rtx_SYMBOL_REF (Pmode,
8460							  alloc_rname));
8461      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8462	{
8463	  rtx addr, reg, mem;
8464	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8465	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8466			       GEN_INT (info->fp_save_offset
8467					+ sp_offset + 8*i));
8468	  mem = gen_rtx_MEM (DFmode, addr);
8469	  set_mem_alias_set (mem, rs6000_sr_alias_set);
8470
8471	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8472	}
8473      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8474      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8475			    NULL_RTX, NULL_RTX);
8476    }
8477
8478  /* Save GPRs.  This is done as a PARALLEL if we are using
8479     the store-multiple instructions.  */
8480  if (using_store_multiple)
8481    {
8482      rtvec p, dwarfp;
8483      int i;
8484      p = rtvec_alloc (32 - info->first_gp_reg_save);
8485      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8486      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8487	{
8488	  rtx addr, reg, mem;
8489	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8490	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8491			       GEN_INT (info->gp_save_offset
8492					+ sp_offset
8493					+ reg_size * i));
8494	  mem = gen_rtx_MEM (reg_mode, addr);
8495	  set_mem_alias_set (mem, rs6000_sr_alias_set);
8496
8497	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8498	}
8499      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8500      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8501			    NULL_RTX, NULL_RTX);
8502    }
8503  else
8504    {
8505      int i;
8506      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8507	if ((regs_ever_live[info->first_gp_reg_save+i]
8508	     && ! call_used_regs[info->first_gp_reg_save+i])
8509	    || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8510		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8511		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8512	  {
8513	    rtx addr, reg, mem;
8514	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8515	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8516				 GEN_INT (info->gp_save_offset
8517					  + sp_offset
8518					  + reg_size * i));
8519	    mem = gen_rtx_MEM (reg_mode, addr);
8520	    set_mem_alias_set (mem, rs6000_sr_alias_set);
8521
8522	    insn = emit_move_insn (mem, reg);
8523	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8524				  NULL_RTX, NULL_RTX);
8525	  }
8526    }
8527
8528  /* ??? There's no need to emit actual instructions here, but it's the
8529     easiest way to get the frame unwind information emitted.  */
8530  if (current_function_calls_eh_return)
8531    {
8532      unsigned int i, regno;
8533
8534      for (i = 0; ; ++i)
8535	{
8536	  rtx addr, reg, mem;
8537
8538	  regno = EH_RETURN_DATA_REGNO (i);
8539	  if (regno == INVALID_REGNUM)
8540	    break;
8541
8542	  reg = gen_rtx_REG (reg_mode, regno);
8543	  addr = plus_constant (frame_reg_rtx,
8544				info->ehrd_offset + sp_offset
8545				+ reg_size * (int) i);
8546	  mem = gen_rtx_MEM (reg_mode, addr);
8547	  set_mem_alias_set (mem, rs6000_sr_alias_set);
8548
8549	  insn = emit_move_insn (mem, reg);
8550	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8551				NULL_RTX, NULL_RTX);
8552	}
8553    }
8554
8555  /* Save lr if we used it.  */
8556  if (info->lr_save_p)
8557    {
8558      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8559			       GEN_INT (info->lr_save_offset + sp_offset));
8560      rtx reg = gen_rtx_REG (Pmode, 0);
8561      rtx mem = gen_rtx_MEM (Pmode, addr);
8562      /* This should not be of rs6000_sr_alias_set, because of
8563	 __builtin_return_address.  */
8564
8565      insn = emit_move_insn (mem, reg);
8566      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8567			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8568    }
8569
8570  /* Save CR if we use any that must be preserved.  */
8571  if (info->cr_save_p)
8572    {
8573      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8574			       GEN_INT (info->cr_save_offset + sp_offset));
8575      rtx mem = gen_rtx_MEM (SImode, addr);
8576
8577      set_mem_alias_set (mem, rs6000_sr_alias_set);
8578
8579      /* If r12 was used to hold the original sp, copy cr into r0 now
8580	 that it's free.  */
8581      if (REGNO (frame_reg_rtx) == 12)
8582	{
8583	  cr_save_rtx = gen_rtx_REG (SImode, 0);
8584	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
8585	}
8586      insn = emit_move_insn (mem, cr_save_rtx);
8587
8588      /* Now, there's no way that dwarf2out_frame_debug_expr is going
8589	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
8590	 OK.  All we have to do is specify that _one_ condition code
8591	 register is saved in this stack slot.  The thrower's epilogue
8592	 will then restore all the call-saved registers.
8593	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
8594      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8595			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8596    }
8597
8598  /* Update stack and set back pointer unless this is V.4,
8599     for which it was done previously.  */
8600  if (info->push_p && DEFAULT_ABI != ABI_V4)
8601    rs6000_emit_allocate_stack (info->total_size, FALSE);
8602
8603  /* Set frame pointer, if needed.  */
8604  if (frame_pointer_needed)
8605    {
8606      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8607			     sp_reg_rtx);
8608      RTX_FRAME_RELATED_P (insn) = 1;
8609    }
8610
8611  /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
8612  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8613      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8614	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8615  {
8616    /* If emit_load_toc_table will use the link register, we need to save
8617       it.  We use R11 for this purpose because emit_load_toc_table
8618       can use register 0.  This allows us to use a plain 'blr' to return
8619       from the procedure more often.  */
8620    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8621				    && ! info->lr_save_p
8622				    && EXIT_BLOCK_PTR->pred != NULL);
8623    if (save_LR_around_toc_setup)
8624      emit_move_insn (gen_rtx_REG (Pmode, 11),
8625		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8626
8627    rs6000_emit_load_toc_table (TRUE);
8628
8629    if (save_LR_around_toc_setup)
8630      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8631		      gen_rtx_REG (Pmode, 11));
8632  }
8633
8634  if (DEFAULT_ABI == ABI_DARWIN
8635      && flag_pic && current_function_uses_pic_offset_table)
8636    {
8637      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8638
8639      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8640
8641      rs6000_maybe_dead (
8642	emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8643			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8644    }
8645}
8646
8647/* Write function prologue.  */
8648
8649static void
8650rs6000_output_function_prologue (file, size)
8651     FILE *file;
8652     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8653{
8654  rs6000_stack_t *info = rs6000_stack_info ();
8655
8656  if (TARGET_DEBUG_STACK)
8657    debug_stack_info (info);
8658
8659  /* Write .extern for any function we will call to save and restore
8660     fp values.  */
8661  if (info->first_fp_reg_save < 64
8662      && !FP_SAVE_INLINE (info->first_fp_reg_save))
8663    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8664	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8665	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8666	     RESTORE_FP_SUFFIX);
8667
8668  /* Write .extern for AIX common mode routines, if needed.  */
8669  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8670    {
8671      fputs ("\t.extern __mulh\n", file);
8672      fputs ("\t.extern __mull\n", file);
8673      fputs ("\t.extern __divss\n", file);
8674      fputs ("\t.extern __divus\n", file);
8675      fputs ("\t.extern __quoss\n", file);
8676      fputs ("\t.extern __quous\n", file);
8677      common_mode_defined = 1;
8678    }
8679
8680  if (! HAVE_prologue)
8681    {
8682      start_sequence ();
8683
8684      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8685	 the "toplevel" insn chain.  */
8686      emit_note (0, NOTE_INSN_DELETED);
8687      rs6000_emit_prologue ();
8688      emit_note (0, NOTE_INSN_DELETED);
8689
8690      if (TARGET_DEBUG_STACK)
8691	debug_rtx_list (get_insns (), 100);
8692      final (get_insns (), file, FALSE, FALSE);
8693      end_sequence ();
8694    }
8695
8696  rs6000_pic_labelno++;
8697}
8698
8699/* Emit function epilogue as insns.
8700
8701   At present, dwarf2out_frame_debug_expr doesn't understand
8702   register restores, so we don't bother setting RTX_FRAME_RELATED_P
8703   anywhere in the epilogue.  Most of the insns below would in any case
8704   need special notes to explain where r11 is in relation to the stack.  */
8705
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();
  /* A single load-multiple can reload the saved GPRs when compiling
     32-bit with -mmultiple and at least two GPRs were saved
     (first_gp_reg_save < 31).  */
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && info->first_gp_reg_save < 31);
  /* FPRs must be reloaded with inline loads -- rather than by
     branching to the out-of-line restore routine -- for sibcalls and
     eh_return, or when no/few FPRs are involved.  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* Whether to restore several CR fields from r12 with one PARALLEL
     of per-field sets; enabled per-CPU (tuning) or when optimizing
     for size.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* Load the saved backchain word at 0(r1).  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  /* Pop the frame immediately; saved registers are then
	     addressed relative to the restored sp.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the byte offset of this vector save slot.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      /* Reload the saved VRSAVE word through r12 and move it back
	 into the VRSAVE special register.  */
      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (Pmode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Load into r0 first; it is moved into LR further below so the
	 loads can overlap.  */
      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* r12 holds the saved CR image until the mtcrf below.  */
      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* EH_RETURN_DATA_REGNO enumerates the data registers until it
	 yields INVALID_REGNUM.  */
      for (i = 0; ; ++i)
	{
	  rtx addr, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise reload one by one: each GPR that was live and
       call-saved, plus the PIC register when it was saved for
       V.4 -fpic or Darwin PIC.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i),
			  mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  /* Count the CR fields that must be restored; there must be
	     at least one, since cr_save_p was set.  */
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		/* Each element sets CR field i from the (r12, field
		   mask 1 << (7-i)) pair wrapped in UNSPEC 20.  */
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* One move-to-CR-field insn per live call-saved field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  /* r11 holds the caller's sp (loaded from the backchain
	     above); copy it back into r1.  */
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  if (current_function_calls_eh_return)
    {
      /* Apply the additional stack adjustment demanded by
	 __builtin_eh_return.  */
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      /* The return is a PARALLEL of (return) and a USE of LR; when
	 branching to the out-of-line FP restore routine it also
	 carries a USE of that symbol and SETs for the FPR loads the
	 routine performs.  */
      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
9047
9048/* Write function epilogue.  */
9049
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* Whether to emit the optional traceback table fields; suppressed
     for -Os and for ELF targets.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyway, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      /* NOTE(review): `optional_tbtab & frame_pointer_needed' is a
	 bitwise AND of two flags; this relies on both being 0 or 1 --
	 confirm frame_pointer_needed is never another nonzero value.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      /* NOTE(review): this uses first_reg_to_save () while the FPR count
	 above uses info->first_fp_reg_save -- verify first_reg_to_save ()
	 still agrees with info->first_gp_reg_save at this point.  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* 2-bit parameter codes: 10 single float,
			 11 double float.  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      /* Fixed-point parameters count one word (and one
			 parm_info bit) per word of the mode.  */
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);
    }
  return;
}
9297
9298/* A C compound statement that outputs the assembler code for a thunk
9299   function, used to implement C++ virtual function calls with
9300   multiple inheritance.  The thunk acts as a wrapper around a virtual
9301   function, adjusting the implicit object parameter before handing
9302   control off to the real function.
9303
9304   First, emit code to add the integer DELTA to the location that
9305   contains the incoming first argument.  Assume that this argument
9306   contains a pointer, and is the one used to pass the `this' pointer
9307   in C++.  This is the incoming argument *before* the function
9308   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
9309   values of all other incoming arguments.
9310
9311   After the addition, emit code to jump to FUNCTION, which is a
9312   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
9313   not touch the return address.  Hence returning from FUNCTION will
9314   return to whoever called the current `thunk'.
9315
9316   The effect must be as if FUNCTION had been called directly with the
9317   adjusted first argument.  This macro is responsible for emitting
9318   all of the code for a thunk function; output_function_prologue()
9319   and output_function_epilogue() are not invoked.
9320
9321   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
9322   been extracted from it.)  It might possibly be useful on some
9323   targets, but probably not.
9324
9325   If you do not define this macro, the target-independent code in the
9326   C++ frontend will generate a less efficient heavyweight thunk that
9327   calls FUNCTION instead of jumping to it.  The generic approach does
9328   not support varargs.  */
9329
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* `this' arrives in r3, unless the function returns an aggregate in
     memory -- then r3 carries the return-value address and `this' is
     in r4.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;	/* Makes each generated TOC label unique.  */

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_high = delta >> 16;
      int delta_low  = delta & 0xffff;
      if ((delta_low & 0x8000) != 0)
	{
	  delta_high++;
	  delta_low = (delta_low ^ 0x8000) - 0x8000;	/* sign extend */
	}

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* 64-bit constants, fixme */
  else
    abort ();

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      /* AIX function entry points are prefixed with a period.  */
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  text_section ();
	  /* With a minimal TOC, first load the TOC-section anchor into
	     r12, then address the new entry relative to it.  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  /* Load the value of the TOC entry just created into r12.  */
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* First word at r12: the code address.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  /* Second word: the callee's TOC pointer, loaded into r2.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  /* Third word: the static chain, loaded into r11.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  /* Under PIC, jump through a stub when the name is not yet
	     defined in this translation unit.  */
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9490
9491
9492/* A quick summary of the various types of 'constant-pool tables'
9493   under PowerPC:
9494
9495   Target	Flags		Name		One table per
9496   AIX		(none)		AIX TOC		object file
9497   AIX		-mfull-toc	AIX TOC		object file
9498   AIX		-mminimal-toc	AIX minimal TOC	translation unit
9499   SVR4/EABI	(none)		SVR4 SDATA	object file
9500   SVR4/EABI	-fpic		SVR4 pic	object file
9501   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
9502   SVR4/EABI	-mrelocatable	EABI TOC	function
9503   SVR4/EABI	-maix		AIX TOC		object file
9504   SVR4/EABI	-maix -mminimal-toc
9505				AIX minimal TOC	translation unit
9506
9507   Name			Reg.	Set by	entries	      contains:
9508					made by	 addrs?	fp?	sum?
9509
9510   AIX TOC		2	crt0	as	 Y	option	option
9511   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
9512   SVR4 SDATA		13	crt0	gcc	 N	Y	N
9513   SVR4 pic		30	prolog	ld	 Y	not yet	N
9514   SVR4 PIC		30	prolog	gcc	 Y	option	option
9515   EABI TOC		30	prolog	gcc	 Y	option	option
9516
9517*/
9518
9519/* Hash table stuff for keeping track of TOC entries.  */
9520
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant was requested in; the same rtx in two different
     modes gets two distinct TOC entries (see toc_hash_eq).  */
  enum machine_mode key_mode;
  /* Number of the internal "LC" label assigned to this entry's slot.  */
  int labelno;
};
9529
9530static htab_t toc_hash_table;
9531
9532/* Hash functions for the hash table.  */
9533
9534static unsigned
9535rs6000_hash_constant (k)
9536     rtx k;
9537{
9538  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9539  const char *format = GET_RTX_FORMAT (GET_CODE (k));
9540  int flen = strlen (format);
9541  int fidx;
9542
9543  if (GET_CODE (k) == LABEL_REF)
9544    return result * 1231 + X0INT (XEXP (k, 0), 3);
9545
9546  if (GET_CODE (k) == CONST_DOUBLE)
9547    fidx = 1;
9548  else if (GET_CODE (k) == CODE_LABEL)
9549    fidx = 3;
9550  else
9551    fidx = 0;
9552
9553  for (; fidx < flen; fidx++)
9554    switch (format[fidx])
9555      {
9556      case 's':
9557	{
9558	  unsigned i, len;
9559	  const char *str = XSTR (k, fidx);
9560	  len = strlen (str);
9561	  result = result * 613 + len;
9562	  for (i = 0; i < len; i++)
9563	    result = result * 613 + (unsigned) str[i];
9564	  break;
9565	}
9566      case 'u':
9567      case 'e':
9568	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9569	break;
9570      case 'i':
9571      case 'n':
9572	result = result * 613 + (unsigned) XINT (k, fidx);
9573	break;
9574      case 'w':
9575	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9576	  result = result * 613 + (unsigned) XWINT (k, fidx);
9577	else
9578	  {
9579	    size_t i;
9580	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9581	      result = result * 613 + (unsigned) (XWINT (k, fidx)
9582						  >> CHAR_BIT * i);
9583	  }
9584	break;
9585      default:
9586	abort ();
9587      }
9588  return result;
9589}
9590
9591static unsigned
9592toc_hash_function (hash_entry)
9593     const void * hash_entry;
9594{
9595  const struct toc_hash_struct *thc =
9596    (const struct toc_hash_struct *) hash_entry;
9597  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9598}
9599
9600/* Compare H1 and H2 for equivalence.  */
9601
9602static int
9603toc_hash_eq (h1, h2)
9604     const void * h1;
9605     const void * h2;
9606{
9607  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9608  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9609
9610  if (((const struct toc_hash_struct *) h1)->key_mode
9611      != ((const struct toc_hash_struct *) h2)->key_mode)
9612    return 0;
9613
9614  /* Gotcha:  One of these const_doubles will be in memory.
9615     The other may be on the constant-pool chain.
9616     So rtx_equal_p will think they are different...  */
9617  if (r1 == r2)
9618    return 1;
9619  if (GET_CODE (r1) != GET_CODE (r2)
9620      || GET_MODE (r1) != GET_MODE (r2))
9621    return 0;
9622  if (GET_CODE (r1) == CONST_DOUBLE)
9623    {
9624      int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9625      int i;
9626      for (i = 1; i < format_len; i++)
9627	if (XWINT (r1, i) != XWINT (r2, i))
9628	  return 0;
9629
9630      return 1;
9631    }
9632  else if (GET_CODE (r1) == LABEL_REF)
9633    return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9634	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9635  else
9636    return rtx_equal_p (r1, r2);
9637}
9638
9639/* Mark the hash table-entry HASH_ENTRY.  */
9640
9641static int
9642toc_hash_mark_entry (hash_slot, unused)
9643     void ** hash_slot;
9644     void * unused ATTRIBUTE_UNUSED;
9645{
9646  const struct toc_hash_struct * hash_entry =
9647    *(const struct toc_hash_struct **) hash_slot;
9648  rtx r = hash_entry->key;
9649  ggc_set_mark (hash_entry);
9650  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
9651  if (GET_CODE (r) == LABEL_REF)
9652    {
9653      ggc_set_mark (r);
9654      ggc_set_mark (XEXP (r, 0));
9655    }
9656  else
9657    ggc_mark_rtx (r);
9658  return 1;
9659}
9660
9661/* Mark all the elements of the TOC hash-table *HT.  */
9662
9663static void
9664toc_hash_mark_table (vht)
9665     void *vht;
9666{
9667  htab_t *ht = vht;
9668
9669  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9670}
9671
9672/* These are the names given by the C++ front-end to vtables, and
9673   vtable-like objects.  Ideally, this logic should not be here;
9674   instead, there should be some programmatic way of inquiring as
9675   to whether or not an object is a vtable.  */
9676
/* Nonzero if NAME carries one of the prefixes the C++ front-end gives
   to vtables and vtable-like objects: "_vt." (old ABI) or "_ZTV",
   "_ZTT", "_ZTC" (new ABI).  The macro previously ignored its
   argument and referenced a variable `name' from the invoking scope,
   which only worked by accident of caller naming; it now uses (NAME).
   Note: NAME is evaluated more than once.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
9682
9683void
9684rs6000_output_symbol_ref (file, x)
9685     FILE *file;
9686     rtx x;
9687{
9688  /* Currently C++ toc references to vtables can be emitted before it
9689     is decided whether the vtable is public or private.  If this is
9690     the case, then the linker will eventually complain that there is
9691     a reference to an unknown section.  Thus, for vtables only,
9692     we emit the TOC reference to reference the symbol and not the
9693     section.  */
9694  const char *name = XSTR (x, 0);
9695
9696  if (VTABLE_NAME_P (name))
9697    {
9698      RS6000_OUTPUT_BASENAME (file, name);
9699    }
9700  else
9701    assemble_name (file, name);
9702}
9703
9704/* Output a TOC entry.  We derive the entry name from what is being
9705   written.  */
9706
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  /* Callers must not ask for a TOC entry when there is no TOC.  */
  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  /* The "LC..labelno" label that loads of this constant reference.  */
  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];	/* The double as two 32-bit target-format words.  */

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* 64-bit: one doubleword directive; 32-bit: two words.  */
      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;		/* The float as one 32-bit target-format word.  */

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx00000000\n", l);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx\n", l);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into low and high 32-bit halves,
	 sign-extending a CONST_INT into HIGH.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
		       POINTER_SIZE, &low, &high, 0);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
	  fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long)high, (long)low);
	      fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
	      fprintf (file, "0x%lx\n", (long) low);
	    }
	  return;
	}
    }

  /* Remaining cases are symbolic: SYMBOL_REF/LABEL_REF, possibly
     wrapped in a CONST with an integer offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  /* Strip the target's name encoding before printing.  */
  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* A nonzero offset is folded into the entry name as ".N<n>"
	 (negative) or ".P<n>" (positive) so distinct offsets get
	 distinct entries.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
9944
9945/* Output an assembler pseudo-op to write an ASCII string of N characters
9946   starting at P to FILE.
9947
9948   On the RS/6000, we have to do this using the .byte operation and
9949   write out special characters outside the quoted string.
9950   Also, the assembler is broken; very long strings are truncated,
9951   so we must artificially break them up early.  */
9952
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  /* Pending-output state: FOR_STRING is emitted before the next
     character placed inside a quoted string, FOR_DECIMAL before the
     next character written as a decimal byte, and TO_CLOSE is whatever
     text finishes the currently open line (if any).  */
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      /* Printable ASCII (space through 0176) goes inside a quoted
	 string; everything else is written as a decimal .byte.  */
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* The assembler truncates very long strings, so close this
	     one and start fresh well before that happens.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
10014
10015/* Generate a unique section name for FILENAME for a section type
10016   represented by SECTION_DESC.  Output goes into BUF.
10017
10018   SECTION_DESC can be any string, as long as it is different for each
10019   possible section type.
10020
10021   We name the section in the same manner as xlc.  The name begins with an
10022   underscore followed by the filename (after stripping any leading directory
10023   names) with the last period replaced by the string SECTION_DESC.  If
10024   FILENAME does not contain a period, SECTION_DESC is appended to the end of
10025   the name.  */
10026
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the basename and the last period within it.
     A period inside a directory prefix must not count as the
     extension separator, so forget any period seen before a slash;
     previously such a period left LAST_PERIOD pointing outside the
     basename and SECTION_DESC was silently dropped.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' prefix, the basename (non-alphanumerics are dropped below, so
     this is an upper bound), SECTION_DESC, and the trailing NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
	  /* Replace the last period with SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
        }

      else if (ISALNUM (*q))
        *p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC instead.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
10069
10070/* Emit profile function.  */
10071
void
output_profile_hook (labelno)
     int labelno;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
      char buf[30];
      const char *label_name;
      rtx fun;

      labelno += 1;

      /* Call mcount with the address of the "LP..labelno" symbol as
	 its single argument.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* Call mcount, passing the caller's return address as the
	 single argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10114
10115/* Write function profiler code.  */
10116
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];

  /* BUF holds the "LP..labelno" label for this function.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
    case ABI_AIX_NODESC:
      /* Save the link register, load the address of BUF into r0 (the
	 addressing strategy depends on the PIC level), then call
	 mcount.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the label's address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: emit the label's PC-relative offset inline and add
	     it to the runtime PC obtained via bl/mflr.  */
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label's absolute address from its
	     high and low halves.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* Preserve the static chain in r30 across the mcount call.  */
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;

    }
}
10185
10186/* Adjust the cost of a scheduling dependency.  Return the new cost of
10187   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
10188
10189static int
10190rs6000_adjust_cost (insn, link, dep_insn, cost)
10191     rtx insn;
10192     rtx link;
10193     rtx dep_insn ATTRIBUTE_UNUSED;
10194     int cost;
10195{
10196  if (! recog_memoized (insn))
10197    return 0;
10198
10199  if (REG_NOTE_KIND (link) != 0)
10200    return 0;
10201
10202  if (REG_NOTE_KIND (link) == 0)
10203    {
10204      /* Data dependency; DEP_INSN writes a register that INSN reads
10205	 some cycles later.  */
10206      switch (get_attr_type (insn))
10207	{
10208	case TYPE_JMPREG:
10209          /* Tell the first scheduling pass about the latency between
10210	     a mtctr and bctr (and mtlr and br/blr).  The first
10211	     scheduling pass will not know about this latency since
10212	     the mtctr instruction, which has the latency associated
10213	     to it, will be generated by reload.  */
10214          return TARGET_POWER ? 5 : 4;
10215	case TYPE_BRANCH:
10216	  /* Leave some extra cycles between a compare and its
10217	     dependent branch, to inhibit expensive mispredicts.  */
10218	  if ((rs6000_cpu_attr == CPU_PPC750
10219               || rs6000_cpu_attr == CPU_PPC7400
10220               || rs6000_cpu_attr == CPU_PPC7450)
10221	      && recog_memoized (dep_insn)
10222	      && (INSN_CODE (dep_insn) >= 0)
10223	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10224		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10225		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10226		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10227	    return cost + 2;
10228	default:
10229	  break;
10230	}
10231      /* Fall out to return default cost.  */
10232    }
10233
10234  return cost;
10235}
10236
10237/* A C statement (sans semicolon) to update the integer scheduling
10238   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
10239   INSN earlier, increase the priority to execute INSN later.  Do not
10240   define this macro if you do not need to adjust the scheduling
10241   priorities of insns.  */
10242
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  /* Disabled experiment implementing the idea above for the PPC750,
     with a stderr trace of each adjustment; kept for reference.  */
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* Currently no adjustment is made.  */
  return priority;
}
10280
10281/* Return how many instructions the machine can issue per cycle.  */
10282
10283static int
10284rs6000_issue_rate ()
10285{
10286  switch (rs6000_cpu_attr) {
10287  case CPU_RIOS1:  /* ? */
10288  case CPU_RS64A:
10289  case CPU_PPC601: /* ? */
10290  case CPU_PPC7450:
10291    return 3;
10292  case CPU_PPC603:
10293  case CPU_PPC750:
10294  case CPU_PPC7400:
10295    return 2;
10296  case CPU_RIOS2:
10297  case CPU_PPC604:
10298  case CPU_PPC604E:
10299  case CPU_PPC620:
10300  case CPU_PPC630:
10301    return 4;
10302  default:
10303    return 1;
10304  }
10305}
10306
10307
10308/* Length in units of the trampoline for entering a nested function.  */
10309
10310int
10311rs6000_trampoline_size ()
10312{
10313  int ret = 0;
10314
10315  switch (DEFAULT_ABI)
10316    {
10317    default:
10318      abort ();
10319
10320    case ABI_AIX:
10321      ret = (TARGET_32BIT) ? 12 : 24;
10322      break;
10323
10324    case ABI_DARWIN:
10325    case ABI_V4:
10326    case ABI_AIX_NODESC:
10327      ret = (TARGET_32BIT) ? 40 : 48;
10328      break;
10329    }
10330
10331  return ret;
10332}
10333
10334/* Emit RTL insns to initialize the variable parts of a trampoline.
10335   FNADDR is an RTX for the address of the function's pure code.
10336   CXT is an RTX for the static chain value for the function.  */
10337
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Size in bytes of one pointer-sized slot.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the entry point and TOC pointer out of FNADDR's own
	   descriptor, then append the static chain as the third
	   slot of the new descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10386
10387
10388/* Table of valid machine attributes.  */
10389
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" takes no arguments and applies to function types.  */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,       0, 0, false, false, false, NULL }  /* Terminating sentinel.  */
};
10396
10397/* Handle a "longcall" attribute; arguments as in struct
10398   attribute_spec.handler.  */
10399
10400static tree
10401rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10402     tree *node;
10403     tree name;
10404     tree args ATTRIBUTE_UNUSED;
10405     int flags ATTRIBUTE_UNUSED;
10406     bool *no_add_attrs;
10407{
10408  if (TREE_CODE (*node) != FUNCTION_TYPE
10409      && TREE_CODE (*node) != FIELD_DECL
10410      && TREE_CODE (*node) != TYPE_DECL)
10411    {
10412      warning ("`%s' attribute only applies to functions",
10413	       IDENTIFIER_POINTER (name));
10414      *no_add_attrs = true;
10415    }
10416
10417  return NULL_TREE;
10418}
10419
10420/* Return a reference suitable for calling a function with the
10421   longcall attribute.  */
10422
10423struct rtx_def *
10424rs6000_longcall_ref (call_ref)
10425     rtx call_ref;
10426{
10427  const char *call_name;
10428  tree node;
10429
10430  if (GET_CODE (call_ref) != SYMBOL_REF)
10431    return call_ref;
10432
10433  /* System V adds '.' to the internal name, so skip them.  */
10434  call_name = XSTR (call_ref, 0);
10435  if (*call_name == '.')
10436    {
10437      while (*call_name == '.')
10438	call_name++;
10439
10440      node = get_identifier (call_name);
10441      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10442    }
10443
10444  return force_reg (Pmode, call_ref);
10445}
10446
10447
10448/* A C statement or statements to switch to the appropriate section
10449   for output of RTX in mode MODE.  You can assume that RTX is some
10450   kind of constant in RTL.  The argument MODE is redundant except in
10451   the case of a `const_int' rtx.  Select the section by calling
10452   `text_section' or one of the alternatives for other sections.
10453
10454   Do not define this macro if you put all constants in the read-only
10455   data section.  */
10456
10457#ifdef USING_ELFOS_H
10458
10459void
10460rs6000_select_rtx_section (mode, x)
10461     enum machine_mode mode;
10462     rtx x;
10463{
10464  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10465    toc_section ();
10466  else
10467    const_section ();
10468}
10469
10470/* A C statement or statements to switch to the appropriate
10471   section for output of DECL.  DECL is either a `VAR_DECL' node
10472   or a constant of some sort.  RELOC indicates whether forming
10473   the initial value of DECL requires link-time relocations.  */
10474
10475void
10476rs6000_select_section (decl, reloc)
10477     tree decl;
10478     int reloc;
10479{
10480  int size = int_size_in_bytes (TREE_TYPE (decl));
10481  int needs_sdata;
10482  int readonly;
10483  static void (* const sec_funcs[4]) PARAMS ((void)) = {
10484    &const_section,
10485    &sdata2_section,
10486    &data_section,
10487    &sdata_section
10488  };
10489
10490  needs_sdata = (size > 0
10491		 && size <= g_switch_value
10492		 && rs6000_sdata != SDATA_NONE
10493		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10494
10495  if (TREE_CODE (decl) == STRING_CST)
10496    readonly = ! flag_writable_strings;
10497  else if (TREE_CODE (decl) == VAR_DECL)
10498    readonly = (! (flag_pic && reloc)
10499		&& TREE_READONLY (decl)
10500		&& ! TREE_SIDE_EFFECTS (decl)
10501		&& DECL_INITIAL (decl)
10502		&& DECL_INITIAL (decl) != error_mark_node
10503		&& TREE_CONSTANT (DECL_INITIAL (decl)));
10504  else if (TREE_CODE (decl) == CONSTRUCTOR)
10505    readonly = (! (flag_pic && reloc)
10506		&& ! TREE_SIDE_EFFECTS (decl)
10507		&& TREE_CONSTANT (decl));
10508  else
10509    readonly = 1;
10510  if (needs_sdata && rs6000_sdata != SDATA_EABI)
10511    readonly = 0;
10512
10513  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10514}
10515
10516/* A C statement to build up a unique section name, expressed as a
10517   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10518   RELOC indicates whether the initial value of EXP requires
10519   link-time relocations.  If you do not define this macro, GCC will use
10520   the symbol name prefixed by `.' as the section name.  Note - this
10521   macro can now be called for uninitialized data items as well as
10522   initialised data and functions.  */
10523
10524void
10525rs6000_unique_section (decl, reloc)
10526     tree decl;
10527     int reloc;
10528{
10529  int len;
10530  int sec;
10531  const char *name;
10532  char *string;
10533  const char *prefix;
10534
10535  static const char *const prefixes[7][2] =
10536  {
10537    { ".rodata.", ".gnu.linkonce.r." },
10538    { ".sdata2.", ".gnu.linkonce.s2." },
10539    { ".data.",   ".gnu.linkonce.d." },
10540    { ".sdata.",  ".gnu.linkonce.s." },
10541    { ".bss.",    ".gnu.linkonce.b." },
10542    { ".sbss.",   ".gnu.linkonce.sb." },
10543    { ".text.",   ".gnu.linkonce.t." }
10544  };
10545
10546  if (TREE_CODE (decl) == FUNCTION_DECL)
10547    sec = 6;
10548  else
10549    {
10550      int readonly;
10551      int needs_sdata;
10552      int size;
10553
10554      readonly = 1;
10555      if (TREE_CODE (decl) == STRING_CST)
10556	readonly = ! flag_writable_strings;
10557      else if (TREE_CODE (decl) == VAR_DECL)
10558	readonly = (! (flag_pic && reloc)
10559		    && TREE_READONLY (decl)
10560		    && ! TREE_SIDE_EFFECTS (decl)
10561		    && TREE_CONSTANT (DECL_INITIAL (decl)));
10562
10563      size = int_size_in_bytes (TREE_TYPE (decl));
10564      needs_sdata = (size > 0
10565		     && size <= g_switch_value
10566		     && rs6000_sdata != SDATA_NONE
10567		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10568
10569      if (DECL_INITIAL (decl) == 0
10570	  || DECL_INITIAL (decl) == error_mark_node)
10571	sec = 4;
10572      else if (! readonly)
10573	sec = 2;
10574      else
10575	sec = 0;
10576
10577      if (needs_sdata)
10578	{
10579	  /* .sdata2 is only for EABI.  */
10580	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
10581	    sec = 2;
10582	  sec += 1;
10583	}
10584    }
10585
10586  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10587  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10588  len    = strlen (name) + strlen (prefix);
10589  string = alloca (len + 1);
10590
10591  sprintf (string, "%s%s", prefix, name);
10592
10593  DECL_SECTION_NAME (decl) = build_string (len, string);
10594}
10595
10596
10597/* If we are referencing a function that is static or is known to be
10598   in this file, make the SYMBOL_REF special.  We can use this to indicate
10599   that we can branch to this function without emitting a no-op after the
10600   call.  For real AIX calling sequences, we also replace the
10601   function name with the real name (1 or 2 leading .'s), rather than
10602   the function descriptor name.  This saves a lot of overriding code
10603   to read the prefixes.  */
10604
void
rs6000_encode_section_info (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Functions already written out, or not visible outside this
	 file, can be branched to directly -- unless they are weak and
	 may be overridden elsewhere.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
          && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): len1 is always 1 here, since the enclosing
	     test is also DEFAULT_ABI == ABI_AIX; the str[1] = '.'
	     store below is therefore always overwritten by the
	     memcpy.  Prefix the symbol name with len1 dots.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      /* An explicit section placement, if any, must be a string.  */
      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables small enough for small data, or explicitly placed
	 in one of the small-data sections, get an '@' prefix on their
	 assembler name -- presumably so other parts of the backend
	 can recognize small-data symbols; verify against the users of
	 this encoding.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
10673
10674#endif /* USING_ELFOS_H */
10675
10676
10677/* Return a REG that occurs in ADDR with coefficient 1.
10678   ADDR can be effectively incremented by incrementing REG.
10679
10680   r0 is special and we must not select it as an address
10681   register by this routine since our caller will try to
10682   increment the returned register via an "la" instruction.  */
10683
10684struct rtx_def *
10685find_addr_reg (addr)
10686     rtx addr;
10687{
10688  while (GET_CODE (addr) == PLUS)
10689    {
10690      if (GET_CODE (XEXP (addr, 0)) == REG
10691	  && REGNO (XEXP (addr, 0)) != 0)
10692	addr = XEXP (addr, 0);
10693      else if (GET_CODE (XEXP (addr, 1)) == REG
10694	       && REGNO (XEXP (addr, 1)) != 0)
10695	addr = XEXP (addr, 1);
10696      else if (CONSTANT_P (XEXP (addr, 0)))
10697	addr = XEXP (addr, 1);
10698      else if (CONSTANT_P (XEXP (addr, 1)))
10699	addr = XEXP (addr, 0);
10700      else
10701	abort ();
10702    }
10703  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10704    return addr;
10705  abort ();
10706}
10707
/* Report OP as an invalid address and abort compilation with a
   fatal insn diagnostic.  */
void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
10714
10715/* Called to register all of our global variables with the garbage
10716   collector.  */
10717
static void
rs6000_add_gc_roots ()
{
  /* The pending comparison operands must survive garbage collection
     between their setup and the insn that consumes them.  */
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* The TOC hash table needs a custom marking routine since it is
     not a plain rtx.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
10732
10733#if TARGET_MACHO
10734
10735#if 0
10736/* Returns 1 if OP is either a symbol reference or a sum of a symbol
10737   reference and a constant.  */
10738
int
symbolic_operand (op)
     rtx op;
{
  /* NOTE(review): this function is inside an #if 0 block and never
     compiled.  The CONST case parses as
       SYMBOL_REF || ((SYMBOL_REF || LABEL_REF) && CONST_INT)
     because && binds tighter than || -- the parentheses as written
     are misleading, though the result appears to match the stated
     intent; confirm before re-enabling.  */
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
10758#endif
10759
10760#ifdef RS6000_LONG_BRANCH
10761
/* Chain of stubs recorded by add_compiler_stub and flushed (then
   reset to 0) by output_compiler_stub.  */
static tree stub_list = 0;
10763
10764/* ADD_COMPILER_STUB adds the compiler generated stub for handling
10765   procedure calls to the linked list.  */
10766
10767void
10768add_compiler_stub (label_name, function_name, line_number)
10769     tree label_name;
10770     tree function_name;
10771     int line_number;
10772{
10773  tree stub = build_tree_list (function_name, label_name);
10774  TREE_TYPE (stub) = build_int_2 (line_number, 0);
10775  TREE_CHAIN (stub) = stub_list;
10776  stub_list = stub;
10777}
10778
/* Accessors for the stub_list entries built by add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
10782
10783/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10784   handling procedure calls from the linked list and initializes the
10785   linked list.  */
10786
10787void
10788output_compiler_stub ()
10789{
10790  char tmp_buf[256];
10791  char label_buf[256];
10792  char *label;
10793  tree tmp_stub, stub;
10794
10795  if (!flag_pic)
10796    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10797      {
10798	fprintf (asm_out_file,
10799		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10800
10801#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10802	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10803	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10804#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10805
10806	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10807	  strcpy (label_buf,
10808		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10809	else
10810	  {
10811	    label_buf[0] = '_';
10812	    strcpy (label_buf+1,
10813		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10814	  }
10815
10816	strcpy (tmp_buf, "lis r12,hi16(");
10817	strcat (tmp_buf, label_buf);
10818	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
10819	strcat (tmp_buf, label_buf);
10820	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
10821	output_asm_insn (tmp_buf, 0);
10822
10823#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10824	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10825	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
10826#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10827      }
10828
10829  stub_list = 0;
10830}
10831
10832/* NO_PREVIOUS_DEF checks in the link list whether the function name is
10833   already there or not.  */
10834
10835int
10836no_previous_def (function_name)
10837     tree function_name;
10838{
10839  tree stub;
10840  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10841    if (function_name == STUB_FUNCTION_NAME (stub))
10842      return 0;
10843  return 1;
10844}
10845
10846/* GET_PREV_LABEL gets the label name from the previous definition of
10847   the function.  */
10848
10849tree
10850get_prev_label (function_name)
10851     tree function_name;
10852{
10853  tree stub;
10854  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10855    if (function_name == STUB_FUNCTION_NAME (stub))
10856      return STUB_LABEL_NAME (stub);
10857  return 0;
10858}
10859
10860/* INSN is either a function call or a millicode call.  It may have an
10861   unconditional jump in its delay slot.
10862
10863   CALL_DEST is the routine we are calling.  */
10864
10865char *
10866output_call (insn, call_dest, operand_number)
10867     rtx insn;
10868     rtx call_dest;
10869     int operand_number;
10870{
10871  static char buf[256];
10872  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
10873    {
10874      tree labelname;
10875      tree funname = get_identifier (XSTR (call_dest, 0));
10876
10877      if (no_previous_def (funname))
10878	{
10879	  int line_number;
10880	  rtx label_rtx = gen_label_rtx ();
10881	  char *label_buf, temp_buf[256];
10882	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
10883				       CODE_LABEL_NUMBER (label_rtx));
10884	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
10885	  labelname = get_identifier (label_buf);
10886	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
10887	  if (insn)
10888	    line_number = NOTE_LINE_NUMBER (insn);
10889	  add_compiler_stub (labelname, funname, line_number);
10890	}
10891      else
10892	labelname = get_prev_label (funname);
10893
10894      sprintf (buf, "jbsr %%z%d,%.246s",
10895	       operand_number, IDENTIFIER_POINTER (labelname));
10896      return buf;
10897    }
10898  else
10899    {
10900      sprintf (buf, "bl %%z%d", operand_number);
10901      return buf;
10902    }
10903}
10904
10905#endif /* RS6000_LONG_BRANCH */
10906
/* Build a local label of the form "L<N>$<symbol>" into BUF, preserving
   any quoting the symbol already has and adding quotes when the name
   needs them.  LENGTH is unused here; the caller sizes BUF.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
10924
10925
10926/* Generate PIC and indirect symbol stubs.  */
10927
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* NOTE(review): this counter is incremented but never read; the
     local label always uses index 0.  Confirm before removing.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* Build the three derived names; each buffer leaves 32 bytes of
     headroom for the generated prefixes and quoting.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Get the current address pc-relatively (bcl/mflr), add the
	 offset to the lazy pointer, load the target through it, and
	 jump via the count register.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself, initially bound to the dyld
     stub-binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
10984
10985/* Legitimize PIC addresses.  If the address is already
10986   position-independent, we return ORIG.  Newly generated
10987   position-independent addresses go into a reg.  This is REG if non
10988   zero, otherwise we allocate register(s) as necessary.  */
10989
/* True iff X is a CONST_INT whose value fits in a signed 16-bit field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
10991
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* No new pseudos may be created during reload.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already pic-register relative: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize both halves of a (const (plus ...)) recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we can neither make a pseudo nor keep a
		 large constant inline; spill it to the constant pool.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
11039
11040/* This is just a placeholder to make linking work without having to
11041   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
11042   ever needed for Darwin (not too likely!) this would have to get a
11043   real definition.  */
11044
void
toc_section ()
{
  /* Deliberately empty: Darwin does not use the AIX TOC machinery,
     and this stub exists only so the generic code links.  */
}
11049
11050#endif /* TARGET_MACHO */
11051
11052#if TARGET_ELF
11053static unsigned int
11054rs6000_elf_section_type_flags (decl, name, reloc)
11055     tree decl;
11056     const char *name;
11057     int reloc;
11058{
11059  unsigned int flags = default_section_type_flags (decl, name, reloc);
11060
11061  if (TARGET_RELOCATABLE)
11062    flags |= SECTION_WRITE;
11063
11064  return flags;
11065}
11066
11067/* Record an element in the table of global constructors.  SYMBOL is
11068   a SYMBOL_REF of the function to be called; PRIORITY is a number
11069   between 0 and MAX_INIT_PRIORITY.
11070
11071   This differs from default_named_section_asm_out_constructor in
11072   that we have special handling for -mrelocatable.  */
11073
11074static void
11075rs6000_elf_asm_out_constructor (symbol, priority)
11076     rtx symbol;
11077     int priority;
11078{
11079  const char *section = ".ctors";
11080  char buf[16];
11081
11082  if (priority != DEFAULT_INIT_PRIORITY)
11083    {
11084      sprintf (buf, ".ctors.%.5u",
11085               /* Invert the numbering so the linker puts us in the proper
11086                  order; constructors are run from right to left, and the
11087                  linker sorts in increasing order.  */
11088               MAX_INIT_PRIORITY - priority);
11089      section = buf;
11090    }
11091
11092  named_section_flags (section, SECTION_WRITE);
11093  assemble_align (POINTER_SIZE);
11094
11095  if (TARGET_RELOCATABLE)
11096    {
11097      fputs ("\t.long (", asm_out_file);
11098      output_addr_const (asm_out_file, symbol);
11099      fputs (")@fixup\n", asm_out_file);
11100    }
11101  else
11102    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11103}
11104
11105static void
11106rs6000_elf_asm_out_destructor (symbol, priority)
11107     rtx symbol;
11108     int priority;
11109{
11110  const char *section = ".dtors";
11111  char buf[16];
11112
11113  if (priority != DEFAULT_INIT_PRIORITY)
11114    {
11115      sprintf (buf, ".dtors.%.5u",
11116               /* Invert the numbering so the linker puts us in the proper
11117                  order; constructors are run from right to left, and the
11118                  linker sorts in increasing order.  */
11119               MAX_INIT_PRIORITY - priority);
11120      section = buf;
11121    }
11122
11123  named_section_flags (section, SECTION_WRITE);
11124  assemble_align (POINTER_SIZE);
11125
11126  if (TARGET_RELOCATABLE)
11127    {
11128      fputs ("\t.long (", asm_out_file);
11129      output_addr_const (asm_out_file, symbol);
11130      fputs (")@fixup\n", asm_out_file);
11131    }
11132  else
11133    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11134}
11135#endif
11136
11137#ifdef OBJECT_FORMAT_COFF
11138static void
11139xcoff_asm_named_section (name, flags)
11140     const char *name;
11141     unsigned int flags ATTRIBUTE_UNUSED;
11142{
11143  fprintf (asm_out_file, "\t.csect %s\n", name);
11144}
11145#endif
11146