/* rs6000.c revision 122180 */
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING.  If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
/* Default TARGET_NO_PROTOTYPE to 0 unless a target header has already
   defined it.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Local shorthands.  NOTE: macro arguments may be evaluated twice.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
/* Target cpu type */

enum processor_type rs6000_cpu;

/* Processor-selection switches.  Entry 0's string is seeded with the
   configure-time default cpu by rs6000_override_options; the others
   come from -mcpu=/-mtune=.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;  /* Raw -mlong-double- argument.  */
int rs6000_long_double_type_size;	     /* Parsed size: 64 or 128.  */

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Raw -mtraceback= argument, parsed into rs6000_traceback below.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;
156
/* Description of one builtin function: the target-flag mask under
   which it is enabled, its insn code, its source-level name, and its
   rs6000_builtins enumerator.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
167
/* Forward declarations of functions defined later in this file.  */
static void rs6000_add_gc_roots PARAMS ((void));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
				     unsigned int, int, int));
static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int toc_hash_mark_entry PARAMS ((void **, void *));
static void toc_hash_mark_table PARAMS ((void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static struct machine_function * rs6000_init_machine_status PARAMS ((void));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility PARAMS ((tree, int));
#endif
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes PARAMS ((tree));
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
					    HOST_WIDE_INT, tree));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
  HOST_WIDE_INT, HOST_WIDE_INT));
/* ELF-specific hook implementations.  */
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
static void rs6000_elf_select_section PARAMS ((tree, int,
						 unsigned HOST_WIDE_INT));
static void rs6000_elf_unique_section PARAMS ((tree, int));
static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
						   unsigned HOST_WIDE_INT));
static void rs6000_elf_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
static bool rs6000_elf_in_small_data_p PARAMS ((tree));
#endif
/* XCOFF-specific hook implementations.  */
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
static void rs6000_xcoff_select_section PARAMS ((tree, int,
						 unsigned HOST_WIDE_INT));
static void rs6000_xcoff_unique_section PARAMS ((tree, int));
static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
						     unsigned HOST_WIDE_INT));
static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
#endif
static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static bool rs6000_binds_local_p PARAMS ((tree));
/* Scheduling hooks.  */
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

/* Builtin-function expansion helpers.  */
static void rs6000_init_builtins PARAMS ((void));
static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static void altivec_init_builtins PARAMS ((void));
static void rs6000_common_init_builtins PARAMS ((void));

static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
					      int, enum rs6000_builtins,
					      enum rs6000_builtins));
static void spe_init_builtins PARAMS ((void));
static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));

static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
/* Option parsing and AltiVec/VRSAVE frame helpers.  */
static void rs6000_parse_abi_options PARAMS ((void));
static void rs6000_parse_vrsave_option PARAMS ((void));
static void rs6000_parse_isel_option PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
static int easy_vector_constant PARAMS ((rtx));
267
/* Default register names, indexed by hard register number.  The
   groups parallel alt_reg_names below: GPRs, FPRs, special registers,
   CRs, XER, AltiVec registers, then SPE registers.  */
char rs6000_reg_names[][8] =
{
      /* GPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* FPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      /* CRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
291
#ifdef TARGET_REGNAMES
/* Alternate (%-prefixed) register names, copied over rs6000_reg_names
   by rs6000_override_options when -mregnames is in effect.  Must stay
   in one-to-one correspondence with rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
316
/* Targets without strict-alignment support define no mask bit.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
323
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

/* Scheduler hooks.  */
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

/* ??? Should work everywhere, but ask dje@watson.ibm.com before
   enabling for AIX.  */
#if TARGET_OBJECT_FORMAT != OBJECT_XCOFF
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
#endif

/* The exported vector of target hooks, built from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
396
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default cpu name (may be NULL);
   it seeds rs6000_select[0] and is overridden by -mcpu=/-mtune=.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each canonical cpu name to its processor enum value
     and the target flag bits it switches on and off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Apply each cpu-selecting switch in order (configure default,
     -mcpu=, then -mtune=), looking up the name in the table above.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* Fell off the end of the table: no such cpu name.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* Default to generating isel instructions on the 8540.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* Stash the requested flag_pic (see rs6000_flag_pic above) and turn
     PIC off for the AIX ABI.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Parse -mtraceback=; only a prefix match on the first few
     characters is required, so e.g. "partial" matches "part".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double; only 64 and 128 are accepted.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* Any trailing text after "longcall" is invalid.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* NOTE(review): real_format_for_mode appears to be indexed relative
     to the first float mode (QFmode) -- confirm against real.h.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
726
727/* Handle -misel= option.  */
728static void
729rs6000_parse_isel_option ()
730{
731  if (rs6000_isel_string == 0)
732    return;
733  else if (! strcmp (rs6000_isel_string, "yes"))
734    rs6000_isel = 1;
735  else if (! strcmp (rs6000_isel_string, "no"))
736    rs6000_isel = 0;
737  else
738    error ("unknown -misel= option specified: '%s'",
739         rs6000_isel_string);
740}
741
742/* Handle -mvrsave= options.  */
743static void
744rs6000_parse_vrsave_option ()
745{
746  /* Generate VRSAVE instructions by default.  */
747  if (rs6000_altivec_vrsave_string == 0
748      || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
749    rs6000_altivec_vrsave = 1;
750  else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
751    rs6000_altivec_vrsave = 0;
752  else
753    error ("unknown -mvrsave= option specified: '%s'",
754	   rs6000_altivec_vrsave_string);
755}
756
757/* Handle -mabi= options.  */
758static void
759rs6000_parse_abi_options ()
760{
761  if (rs6000_abi_string == 0)
762    return;
763  else if (! strcmp (rs6000_abi_string, "altivec"))
764    rs6000_altivec_abi = 1;
765  else if (! strcmp (rs6000_abi_string, "no-altivec"))
766    rs6000_altivec_abi = 0;
767  else if (! strcmp (rs6000_abi_string, "spe"))
768    rs6000_spe_abi = 1;
769  else if (! strcmp (rs6000_abi_string, "no-spe"))
770    rs6000_spe_abi = 0;
771  else
772    error ("unknown ABI specified: '%s'", rs6000_abi_string);
773}
774
/* Per-optimization-level option hook.  Currently adjusts nothing for
   this port; both arguments are intentionally ignored.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
781
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit a one-line comment listing the cpu/option
   switches in effect (FILE is the asm output stream, DEFAULT_CPU the
   configure-time cpu name).  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START points at the banner for the first fragment printed,
	 then is reset to "" so the banner appears only once.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* START was reset iff something was printed; terminate the
	 comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
829
/* Return nonzero if this function is known to have a null epilogue.
   Only meaningful after reload, when the frame layout is final; before
   that we conservatively return 0.  */

int
direct_return ()
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* No epilogue work is needed iff nothing was saved: no GPRs
	 (first_gp_reg_save == 32 means "none"), no FPRs (64 likewise),
	 no AltiVec regs, no LR/CR restore, no VRSAVE bits, and no
	 stack frame was pushed.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
851
/* Returns 1 always.  Used as a predicate where every operand is
   acceptable.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
861
862/* Returns 1 if op is the count register.  */
863int
864count_register_operand (op, mode)
865     rtx op;
866     enum machine_mode mode ATTRIBUTE_UNUSED;
867{
868  if (GET_CODE (op) != REG)
869    return 0;
870
871  if (REGNO (op) == COUNT_REGISTER_REGNUM)
872    return 1;
873
874  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
875    return 1;
876
877  return 0;
878}
879
880/* Returns 1 if op is an altivec register.  */
881int
882altivec_register_operand (op, mode)
883     rtx op;
884     enum machine_mode mode ATTRIBUTE_UNUSED;
885{
886
887  return (register_operand (op, mode)
888	  && (GET_CODE (op) != REG
889	      || REGNO (op) > FIRST_PSEUDO_REGISTER
890	      || ALTIVEC_REGNO_P (REGNO (op))));
891}
892
893int
894xer_operand (op, mode)
895     rtx op;
896     enum machine_mode mode ATTRIBUTE_UNUSED;
897{
898  if (GET_CODE (op) != REG)
899    return 0;
900
901  if (XER_REGNO_P (REGNO (op)))
902    return 1;
903
904  return 0;
905}
906
907/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
908   by such constants completes more quickly.  */
909
910int
911s8bit_cint_operand (op, mode)
912     rtx op;
913     enum machine_mode mode ATTRIBUTE_UNUSED;
914{
915  return ( GET_CODE (op) == CONST_INT
916	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
917}
918
919/* Return 1 if OP is a constant that can fit in a D field.  */
920
921int
922short_cint_operand (op, mode)
923     rtx op;
924     enum machine_mode mode ATTRIBUTE_UNUSED;
925{
926  return (GET_CODE (op) == CONST_INT
927	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
928}
929
930/* Similar for an unsigned D field.  */
931
932int
933u_short_cint_operand (op, mode)
934     rtx op;
935     enum machine_mode mode ATTRIBUTE_UNUSED;
936{
937  return (GET_CODE (op) == CONST_INT
938	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
939}
940
941/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
942
943int
944non_short_cint_operand (op, mode)
945     rtx op;
946     enum machine_mode mode ATTRIBUTE_UNUSED;
947{
948  return (GET_CODE (op) == CONST_INT
949	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
950}
951
952/* Returns 1 if OP is a CONST_INT that is a positive value
953   and an exact power of 2.  */
954
955int
956exact_log2_cint_operand (op, mode)
957     rtx op;
958     enum machine_mode mode ATTRIBUTE_UNUSED;
959{
960  return (GET_CODE (op) == CONST_INT
961	  && INTVAL (op) > 0
962	  && exact_log2 (INTVAL (op)) >= 0);
963}
964
965/* Returns 1 if OP is a register that is not special (i.e., not MQ,
966   ctr, or lr).  */
967
968int
969gpc_reg_operand (op, mode)
970     rtx op;
971     enum machine_mode mode;
972{
973  return (register_operand (op, mode)
974	  && (GET_CODE (op) != REG
975	      || (REGNO (op) >= ARG_POINTER_REGNUM
976		  && !XER_REGNO_P (REGNO (op)))
977	      || REGNO (op) < MQ_REGNO));
978}
979
980/* Returns 1 if OP is either a pseudo-register or a register denoting a
981   CR field.  */
982
983int
984cc_reg_operand (op, mode)
985     rtx op;
986     enum machine_mode mode;
987{
988  return (register_operand (op, mode)
989	  && (GET_CODE (op) != REG
990	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
991	      || CR_REGNO_P (REGNO (op))));
992}
993
994/* Returns 1 if OP is either a pseudo-register or a register denoting a
995   CR field that isn't CR0.  */
996
997int
998cc_reg_not_cr0_operand (op, mode)
999     rtx op;
1000     enum machine_mode mode;
1001{
1002  return (register_operand (op, mode)
1003	  && (GET_CODE (op) != REG
1004	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1005	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
1006}
1007
1008/* Returns 1 if OP is either a constant integer valid for a D-field or
1009   a non-special register.  If a register, it must be in the proper
1010   mode unless MODE is VOIDmode.  */
1011
1012int
1013reg_or_short_operand (op, mode)
1014      rtx op;
1015      enum machine_mode mode;
1016{
1017  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1018}
1019
1020/* Similar, except check if the negation of the constant would be
1021   valid for a D-field.  */
1022
1023int
1024reg_or_neg_short_operand (op, mode)
1025      rtx op;
1026      enum machine_mode mode;
1027{
1028  if (GET_CODE (op) == CONST_INT)
1029    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1030
1031  return gpc_reg_operand (op, mode);
1032}
1033
1034/* Returns 1 if OP is either a constant integer valid for a DS-field or
1035   a non-special register.  If a register, it must be in the proper
1036   mode unless MODE is VOIDmode.  */
1037
1038int
1039reg_or_aligned_short_operand (op, mode)
1040      rtx op;
1041      enum machine_mode mode;
1042{
1043  if (gpc_reg_operand (op, mode))
1044    return 1;
1045  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1046    return 1;
1047
1048  return 0;
1049}
1050
1051
1052/* Return 1 if the operand is either a register or an integer whose
1053   high-order 16 bits are zero.  */
1054
1055int
1056reg_or_u_short_operand (op, mode)
1057     rtx op;
1058     enum machine_mode mode;
1059{
1060  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1061}
1062
1063/* Return 1 is the operand is either a non-special register or ANY
1064   constant integer.  */
1065
1066int
1067reg_or_cint_operand (op, mode)
1068    rtx op;
1069    enum machine_mode mode;
1070{
1071  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1072}
1073
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* Bias by 2^31 so a single unsigned compare checks that
		 the value lies in [-2^31, 2^31).  On a 32-bit host any
		 CONST_INT fits trivially, so no check is needed.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1090
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition (the constant is
   split into addi/addis immediates; the extra 0x8000 bias below accounts
   for the carry from the low half into the high half).  */

int
reg_or_add_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* On a 32-bit host a CONST_INT is at most 32 bits wide;
		 only the top range where the carry would overflow the
		 addis immediate must be rejected.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Bias by 0x80008000 so one unsigned compare checks both
		 the signed 32-bit range and the low-half carry case.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1109
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction, i.e. one whose
   negation satisfies the reg_or_add_cint64_operand range check above.  */

int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      /* Same biased compare as reg_or_add_cint64_operand,
		 applied to the negated value.  */
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1128
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word must be at least 64 bits;
	     a 32-bit-or-narrower mode here indicates an internal
	     inconsistency.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT would sign-extend beyond 32 bits in
	     the wider mode.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept iff no bits above the low 32 are set within MODE.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs should appear only for DImode values
	 wider than the host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1162
1163/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1164
1165int
1166got_operand (op, mode)
1167     rtx op;
1168     enum machine_mode mode ATTRIBUTE_UNUSED;
1169{
1170  return (GET_CODE (op) == SYMBOL_REF
1171	  || GET_CODE (op) == CONST
1172	  || GET_CODE (op) == LABEL_REF);
1173}
1174
1175/* Return 1 if the operand is a simple references that can be loaded via
1176   the GOT (labels involving addition aren't allowed).  */
1177
1178int
1179got_no_const_operand (op, mode)
1180     rtx op;
1181     enum machine_mode mode ATTRIBUTE_UNUSED;
1182{
1183  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1184}
1185
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32 bits and the rest.
	 The shift by 31 (not 32) folds bit 31 into HIGH as well, so
	 HIGH == 0 or -1 exactly when VALUE fits in 32 bits signed.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Fits in 32 bits: lis + addi/ori, two insns.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Build HIGH, then one shift to position it.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Build HIGH, shift it up, then OR in the low part.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1223
/* Return the number of insns needed to load the constant OP (a
   CONST_INT or CONST_DOUBLE) of mode MODE into an integer register.
   Aborts on any other rtx code.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit rotate-and-mask constant can always be built in two
	 insns (load a simple value, then rldic*).  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* A single-precision float is loaded via its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* DImode/VOIDmode CONST_DOUBLEs carry the integer value directly;
	 otherwise take the target image of the double-precision float,
	 picking the word order from target endianness.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two independent 32-bit halves.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit: the value may sign-extend from the low word, be a
	     rotate-and-mask constant, or need a shift/or sequence.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1298
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      /* Easy iff each of the four 32-bit words loads in one insn.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* NOTE(review): L is passed without the (HOST_WIDE_INT) cast used
	 in the other branches above -- presumably benign, but confirm
	 sign-extension behavior on hosts where long is 64 bits.  */
      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1376
1377/* Return 1 if the operand is a CONST_INT and can be put into a
1378   register with one instruction.  */
1379
1380static int
1381easy_vector_constant (op)
1382     rtx op;
1383{
1384  rtx elt;
1385  int units, i;
1386
1387  if (GET_CODE (op) != CONST_VECTOR)
1388    return 0;
1389
1390  units = CONST_VECTOR_NUNITS (op);
1391
1392  /* We can generate 0 easily.  Look for that.  */
1393  for (i = 0; i < units; ++i)
1394    {
1395      elt = CONST_VECTOR_ELT (op, i);
1396
1397      /* We could probably simplify this by just checking for equality
1398	 with CONST0_RTX for the current mode, but let's be safe
1399	 instead.  */
1400
1401      switch (GET_CODE (elt))
1402	{
1403	case CONST_INT:
1404	  if (INTVAL (elt) != 0)
1405	    return 0;
1406	  break;
1407	case CONST_DOUBLE:
1408	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1409	    return 0;
1410	  break;
1411	default:
1412	  return 0;
1413	}
1414    }
1415
1416  /* We could probably generate a few other constants trivially, but
1417     gcc doesn't generate them yet.  FIXME later.  */
1418  return 1;
1419}
1420
/* Return 1 if the operand is the constant 0.  This works for scalars
   as well as vectors, since CONST0_RTX (mode) is the canonical shared
   zero rtx for MODE and pointer comparison suffices.  */
int
zero_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return op == CONST0_RTX (mode);
}
1430
1431/* Return 1 if the operand is 0.0.  */
1432int
1433zero_fp_constant (op, mode)
1434     rtx op;
1435     enum machine_mode mode;
1436{
1437  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1438}
1439
1440/* Return 1 if the operand is in volatile memory.  Note that during
1441   the RTL generation phase, memory_operand does not return TRUE for
1442   volatile memory references.  So this function allows us to
1443   recognize volatile references where its safe.  */
1444
1445int
1446volatile_mem_operand (op, mode)
1447     rtx op;
1448     enum machine_mode mode;
1449{
1450  if (GET_CODE (op) != MEM)
1451    return 0;
1452
1453  if (!MEM_VOLATILE_P (op))
1454    return 0;
1455
1456  if (mode != GET_MODE (op))
1457    return 0;
1458
1459  if (reload_completed)
1460    return memory_operand (op, mode);
1461
1462  if (reload_in_progress)
1463    return strict_memory_address_p (mode, XEXP (op, 0));
1464
1465  return memory_address_p (mode, XEXP (op, 0));
1466}
1467
1468/* Return 1 if the operand is an offsettable memory operand.  */
1469
1470int
1471offsettable_mem_operand (op, mode)
1472     rtx op;
1473     enum machine_mode mode;
1474{
1475  return ((GET_CODE (op) == MEM)
1476	  && offsettable_address_p (reload_completed || reload_in_progress,
1477				    mode, XEXP (op, 0)));
1478}
1479
1480/* Return 1 if the operand is either an easy FP constant (see above) or
1481   memory.  */
1482
1483int
1484mem_or_easy_const_operand (op, mode)
1485     rtx op;
1486     enum machine_mode mode;
1487{
1488  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1489}
1490
1491/* Return 1 if the operand is either a non-special register or an item
1492   that can be used as the operand of a `mode' add insn.  */
1493
1494int
1495add_operand (op, mode)
1496    rtx op;
1497    enum machine_mode mode;
1498{
1499  if (GET_CODE (op) == CONST_INT)
1500    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1501	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1502
1503  return gpc_reg_operand (op, mode);
1504}
1505
1506/* Return 1 if OP is a constant but not a valid add_operand.  */
1507
1508int
1509non_add_cint_operand (op, mode)
1510     rtx op;
1511     enum machine_mode mode ATTRIBUTE_UNUSED;
1512{
1513  return (GET_CODE (op) == CONST_INT
1514	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1515	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1516}
1517
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000:
   a constant whose set bits all fit in either the low 16 bits
   (ori/xori) or the high 16 bits (oris/xoris) of the mode.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* A negative masked value means bits beyond the host word would
	 be set in a wider mode; no single immediate insn covers that.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only appear for values wider than the
	 host word; the high word must be all zero.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1556
1557/* Return 1 if C is a constant that is not a logical operand (as
1558   above), but could be split into one.  */
1559
1560int
1561non_logical_cint_operand (op, mode)
1562     rtx op;
1563     enum machine_mode mode;
1564{
1565  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1566	  && ! logical_operand (op, mode)
1567	  && reg_or_logical_cint_operand (op, mode));
1568}
1569
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;			/* c & -c isolates the lowest set bit.  */

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1616
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask that
   wraps around bit 0, i.e. has both the MSB and LSB of the low 32 bits
   set.  Such masks are rejected by mask_operand above.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks are handled here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* The complement of a wrapping mask is non-wrapping, so apply the
     same transition-counting scheme as mask_operand to ~C: reject
     all-zeros, find the first transition, erase it, and require that
     everything above the second transition (if any) is 1's.  */
  c = ~c;
  if (c == 0)
    return 0;

  lsb = c & -c;			/* First transition.  */
  c = ~c;			/* Invert to find a second one.  */
  c &= -lsb;			/* Erase the first transition.  */
  lsb = c & -c;			/* Second transition, if any.  */
  return c == -lsb;
}
1644
/* Return 1 if the operand is a constant that is a PowerPC64 mask
   (suitable for a single rldicl/rldicr).  It is if there are no more
   than one 1->0 or 0->1 transitions.  Reject all zeros, since zero
   should have been optimized away and confuses the making of MB and
   ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  return 0;
}
1676
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns (see build_mask64_2_operands below for how the
   two masks are derived).  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1727
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT accepted by
   mask64_2_operand).  OUT receives four CONST_INTs: OUT[0]/OUT[1] are
   the rotate count and mask for the first insn, OUT[2]/OUT[3] the
   counter-rotate count and mask for the second.  Aborts if IN is not
   a CONST_INT, or (at runtime) on hosts without 64-bit
   HOST_WIDE_INT.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* This path should be unreachable: 64-bit masks require a 64-bit
     HOST_WIDE_INT host.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
1799
1800/* Return 1 if the operand is either a non-special register or a constant
1801   that can be used as the operand of a PowerPC64 logical AND insn.  */
1802
1803int
1804and64_operand (op, mode)
1805    rtx op;
1806    enum machine_mode mode;
1807{
1808  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1809    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1810
1811  return (logical_operand (op, mode) || mask64_operand (op, mode));
1812}
1813
1814/* Like the above, but also match constants that can be implemented
1815   with two rldicl or rldicr insns.  */
1816
1817int
1818and64_2_operand (op, mode)
1819    rtx op;
1820    enum machine_mode mode;
1821{
1822  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis. */
1823    return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1824
1825  return logical_operand (op, mode) || mask64_2_operand (op, mode);
1826}
1827
1828/* Return 1 if the operand is either a non-special register or a
1829   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1830
1831int
1832and_operand (op, mode)
1833    rtx op;
1834    enum machine_mode mode;
1835{
1836  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1837    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1838
1839  return (logical_operand (op, mode) || mask_operand (op, mode));
1840}
1841
1842/* Return 1 if the operand is a general register or memory operand.  */
1843
1844int
1845reg_or_mem_operand (op, mode)
1846     rtx op;
1847     enum machine_mode mode;
1848{
1849  return (gpc_reg_operand (op, mode)
1850	  || memory_operand (op, mode)
1851	  || volatile_mem_operand (op, mode));
1852}
1853
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  lwa is a DS-form instruction: its displacement must be
   a multiple of 4 and it has no update (pre-modify) form.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload a SUBREG merely renames the underlying hard register
     or memory, so look through it.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* Accept a register, or memory whose address has no pre-modify and
     whose constant displacement (if any) is word-aligned.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1876
1877/* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.  */
1878
1879int
1880symbol_ref_operand (op, mode)
1881     rtx op;
1882     enum machine_mode mode;
1883{
1884  if (mode != VOIDmode && GET_MODE (op) != mode)
1885    return 0;
1886
1887  return (GET_CODE (op) == SYMBOL_REF);
1888}
1889
1890/* Return 1 if the operand, used inside a MEM, is a valid first argument
1891   to CALL.  This is a SYMBOL_REF, a pseudo-register, LR or CTR.  */
1892
1893int
1894call_operand (op, mode)
1895     rtx op;
1896     enum machine_mode mode;
1897{
1898  if (mode != VOIDmode && GET_MODE (op) != mode)
1899    return 0;
1900
1901  return (GET_CODE (op) == SYMBOL_REF
1902	  || (GET_CODE (op) == REG
1903	      && (REGNO (op) == LINK_REGISTER_REGNUM
1904		  || REGNO (op) == COUNT_REGISTER_REGNUM
1905		  || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1906}
1907
/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
   this file and the function is not weakly defined.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* SYMBOL_REF_FLAG marks symbols this target considers local --
     presumably set by this port's ENCODE_SECTION_INFO; confirm there.
     The current function itself also qualifies, unless it is weak and
     might therefore be overridden at link time.  */
  return (GET_CODE (op) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (op)
	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
	          && ! DECL_WEAK (current_function_decl))));
}
1921
/* Return 1 if this operand is a valid input for a move insn of mode
   MODE.  Each accepted class is described by the comment preceding
   its test below.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
1978
1979/* Return 1 for an operand in small memory on V.4/eabi.  */
1980
int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small data area at all, or small data forced into .data.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  /* Only the V.4 ABI has a small data area.  */
  if (DEFAULT_ABI != ABI_V4)
    return 0;

  /* Accept either a bare SYMBOL_REF, or (const (plus SYMBOL_REF
     CONST_INT)); anything else cannot be a small data reference.  */
  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  The
        offset must stay within the -G limit as well.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Only symbols whose names carry the small-data '@' marker qualify
     (the marker is presumably applied where the symbol's section is
     decided — confirm against the section-info encoding code).  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2027
2028static int
2029constant_pool_expr_1 (op, have_sym, have_toc)
2030    rtx op;
2031    int *have_sym;
2032    int *have_toc;
2033{
2034  switch (GET_CODE(op))
2035    {
2036    case SYMBOL_REF:
2037      if (CONSTANT_POOL_ADDRESS_P (op))
2038	{
2039	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2040	    {
2041	      *have_sym = 1;
2042	      return 1;
2043	    }
2044	  else
2045	    return 0;
2046	}
2047      else if (! strcmp (XSTR (op, 0), toc_label_name))
2048	{
2049	  *have_toc = 1;
2050	  return 1;
2051	}
2052      else
2053	return 0;
2054    case PLUS:
2055    case MINUS:
2056      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2057	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2058    case CONST:
2059      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2060    case CONST_INT:
2061      return 1;
2062    default:
2063      return 0;
2064    }
2065}
2066
2067int
2068constant_pool_expr_p (op)
2069    rtx op;
2070{
2071  int have_sym = 0;
2072  int have_toc = 0;
2073  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2074}
2075
2076int
2077toc_relative_expr_p (op)
2078    rtx op;
2079{
2080    int have_sym = 0;
2081    int have_toc = 0;
2082    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2083}
2084
2085/* Try machine-dependent ways of modifying an illegitimate address
2086   to be legitimate.  If we find one, return the new, valid address.
2087   This is used from only one place: `memory_address' in explow.c.
2088
2089   OLDX is the address as it was before break_out_memory_refs was
2090   called.  In some cases it is useful to look at this to decide what
2091   needs to be done.
2092
2093   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2094
2095   It is always safe for this function to do nothing.  It exists to
2096   recognize opportunities to optimize the output.
2097
2098   On RS/6000, first check for the sum of a register with a constant
2099   integer that is out of range.  If so, generate code to add the
2100   constant with the low-order 16 bits masked to the register and force
2101   this result into another register (this can be done with `cau').
2102   Then generate an address of REG+(CONST&0xffff), allowing for the
2103   possibility of bit 16 being a one.
2104
2105   Then check for the sum of a register and something not constant, try to
2106   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + constant that does not fit in a signed 16-bit displacement:
     add the high part into a register (rounded so the remaining low
     part is a signed 16-bit value) and keep the low part in the
     address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits so high + low == original value.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second term into a register and use
     indexed addressing, but only for modes that support it (indexed
     form is excluded for multi-register modes; see the comment before
     rs6000_legitimate_address).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes accept only [reg] and [reg+reg] forms.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
      {
        rtx op1 = XEXP (x, 0);
        rtx op2 = XEXP (x, 1);

        op1 = force_reg (Pmode, op1);

        /* A non-register second operand is only kept if it is a
           constant in the SPE offset range.  */
        if (GET_CODE (op2) != REG
            && (GET_CODE (op2) != CONST_INT
                || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
          op2 = force_reg (Pmode, op2);

        return gen_rtx_PLUS (Pmode, op1, op2);
      }

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC and without PIC: materialize the symbolic
     constant with an elf_high/LO_SUM pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Mach-O (Darwin) without a TOC and without PIC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Special TOC constant-pool entries become TOC-relative references.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2206
2207/* The convention appears to be to define this wherever it is used.
2208   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2209   is now used here.  */
2210#ifndef REG_MODE_OK_FOR_BASE_P
2211#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2212#endif
2213
2214/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
2215   replace the input X, or the original X if no replacement is called for.
2216   The output parameter *WIN is 1 if the calling macro should goto WIN,
2217   0 if it should not.
2218
2219   For RS/6000, we wish to handle large displacements off a base
2220   register by splitting the addend across an addiu/addis and the mem insn.
2221   This cuts number of extra insns needed from 3 to 1.
2222
2223   On Darwin, we use this to generate code for floating point constants.
2224   A movsf_low is generated so we wind up with 2 instructions rather than 3.
2225   The Darwin code is inside #if TARGET_MACHO because only then is
2226   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves:
     (plus (plus reg const) const) from the reg+const case below.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + constant: split the addend into a high part that is reloaded
     into a base register and a signed 16-bit low part that stays in
     the mem, saving two of the three insns reload would otherwise
     need.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Sign-extend the low 16 bits; round the remainder to 32 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Build a
	 picbase-relative HIGH/LO_SUM pair; the first case above
	 recognizes this form if we come back here.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Special TOC constant-pool entries become TOC-relative references;
     no reload needed.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2337
2338/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2339   that is a valid memory address for an instruction.
2340   The MODE argument is the machine mode for the MEM expression
2341   that wants to use this address.
2342
   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2344   refers to a constant pool entry of an address (or the sum of it
2345   plus a constant), a short (16-bit signed) constant plus a register,
2346   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
2348   we must ensure that both words are addressable or PowerPC64 with offset
2349   word aligned.
2350
2351   For modes spanning multiple registers (DFmode in 32-bit GPRs,
2352   32-bit DImode, TImode), indexed addressing cannot be used because
2353   adjacent memory cells are accessed by adding word-sized offsets
2354   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement of a register, where the target supports
     update forms and the mode is not a vector mode.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* V.4 small data reference.  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* SYMBOL_REF for a TOC constant-pool entry.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg) — excluded for modes spanning multiple
     registers; see the comment above this function.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM of a register and a symbolic low part.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2393
2394/* Try to output insns to set TARGET equal to the constant C if it can
2395   be done in less than N insns.  Do all computations in MODE.
2396   Returns the place where the output has been placed if it can be
2397   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */
2399
rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QImode/HImode constants always fit in one insn.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  /* SImode: load the high 16 bits, then OR in the low 16 bits.  */
  else if (mode == SImode)
    {
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  /* DImode: extract the value as a (low, high) word pair and let
     rs6000_emit_set_long_const decompose it.  */
  else if (mode == DImode)
    {
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high word.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note recording the full constant to the last
     insn emitted, so later passes know the final value.
     NOTE(review): single_set may return NULL for a parallel — looks
     like every insn emitted above is a simple set, but confirm.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2460
2461/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2462   fall back to a straight forward decomposition.  We do this to avoid
2463   exponential run times encountered when looking for longer sequences
2464   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  /* On 32-bit targets, simply set the two word halves separately.  */
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* Split the 64-bit value into four 16-bit chunks, ud4:ud3:ud2:ud1
	 from most to least significant.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value fits in a sign-extended 16-bit immediate: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value fits in a sign-extended 32-bit immediate: load the high
	 half, then OR in the low half.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* 48 significant bits: build ud3:ud2 as above, shift left 16,
	 then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Full 64 bits: build ud4:ud3, shift left 32, OR in ud2:ud1.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2549
2550/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A non-float CONST_DOUBLE whose value would fit in a CONST_INT
     should have been a CONST_INT in the first place.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If a DImode mem-to-mem move would be slow (unaligned) but two
     SImode moves would not, split it here.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* When optimizing, load narrow memory values zero-extended into a
	 full word and take the lowpart, rather than loading the narrow
	 mode directly.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Stores need the source in a register.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* On POWER (as opposed to PowerPC), FP registers may hold
     double-precision data even in SFmode; truncate before storing.  */
  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* Handle the case of CONSTANT_P_RTX.  */
  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
    goto emit_set;

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants must come from memory.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants come from memory unless they are "easy" (can be
	 synthesized directly — see easy_fp_constant).  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data.  */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address from the GOT.
	 (mode == Pmode && mode == SImode restricts this to 32-bit.)  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and no PIC: synthesize the address with a HIGH/low
	 pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the constant-pool slot itself is a special TOC entry,
	     reference it through the TOC.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must be register-indirect; copy other
	 addresses into a register first.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      /* POWER's TImode move pattern needs a scratch register.  */
      if (TARGET_POWER)
        {
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
		       gen_rtvec (2,
				  gen_rtx_SET (VOIDmode,
					       operands[0], operands[1]),
				  gen_rtx_CLOBBER (VOIDmode,
						   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
2872
2873/* Initialize a variable CUM of type CUMULATIVE_ARGS
2874   for a call to a function whose data type is FNTYPE.
2875   For a library call, FNTYPE is 0.
2876
2877   For incoming args we set the number of arguments in the prototype large
2878   so we never return a PARALLEL.  */
2879
2880void
2881init_cumulative_args (cum, fntype, libname, incoming, libcall)
2882     CUMULATIVE_ARGS *cum;
2883     tree fntype;
2884     rtx libname ATTRIBUTE_UNUSED;
2885     int incoming;
2886     int libcall;
2887{
2888  static CUMULATIVE_ARGS zero_cumulative;
2889
2890  *cum = zero_cumulative;
2891  cum->words = 0;
2892  cum->fregno = FP_ARG_MIN_REG;
2893  cum->vregno = ALTIVEC_ARG_MIN_REG;
2894  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2895  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
2896		      ? CALL_LIBCALL : CALL_NORMAL);
2897  cum->sysv_gregno = GP_ARG_MIN_REG;
2898
2899  if (incoming)
2900    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2901
2902  else if (cum->prototype)
2903    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2904			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2905			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2906
2907  else
2908    cum->nargs_prototype = 0;
2909
2910  cum->orig_nargs = cum->nargs_prototype;
2911
2912  /* Check for a longcall attribute.  */
2913  if (fntype
2914      && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2915      && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2916    cum->call_cookie = CALL_LONG;
2917
2918  if (TARGET_DEBUG_ARG)
2919    {
2920      fprintf (stderr, "\ninit_cumulative_args:");
2921      if (fntype)
2922	{
2923	  tree ret_type = TREE_TYPE (fntype);
2924	  fprintf (stderr, " ret code = %s,",
2925		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2926	}
2927
2928      if (cum->call_cookie & CALL_LONG)
2929	fprintf (stderr, " longcall,");
2930
2931      fprintf (stderr, " proto = %d, nargs = %d\n",
2932	       cum->prototype, cum->nargs_prototype);
2933    }
2934}
2935
2936/* If defined, a C expression which determines whether, and in which
2937   direction, to pad out an argument with extra space.  The value
2938   should be of type `enum direction': either `upward' to pad above
2939   the argument, `downward' to pad below, or `none' to inhibit
2940   padding.
2941
2942   For the AIX ABI structs are always stored left shifted in their
2943   argument slot.  */
2944
2945enum direction
2946function_arg_padding (mode, type)
2947     enum machine_mode mode;
2948     tree type;
2949{
2950  if (type != 0 && AGGREGATE_TYPE_P (type))
2951    return upward;
2952
2953  /* This is the default definition.  */
2954  return (! BYTES_BIG_ENDIAN
2955          ? upward
2956          : ((mode == BLKmode
2957              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2958                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2959              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2960             ? downward : upward));
2961}
2962
2963/* If defined, a C expression that gives the alignment boundary, in bits,
2964   of an argument with the specified mode and type.  If it is not defined,
2965   PARM_BOUNDARY is used for all arguments.
2966
2967   V.4 wants long longs to be double word aligned.  */
2968
2969int
2970function_arg_boundary (mode, type)
2971     enum machine_mode mode;
2972     tree type ATTRIBUTE_UNUSED;
2973{
2974  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2975    return 64;
2976   else if (SPE_VECTOR_MODE (mode))
2977     return 64;
2978  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2979    return 128;
2980  else
2981    return PARM_BOUNDARY;
2982}
2983
2984/* Update the data in CUM to advance over an argument
2985   of mode MODE and data type TYPE.
2986   (TYPE is null for libcalls where that information may not be available.)  */
2987
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One more argument consumed; once this goes negative we are past
     the prototyped parameters.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec vectors consume a vector register while any remain
	 (and we are still inside the prototype); otherwise they take
	 stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector consumes a single GPR slot.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* Out of FPRs: the value goes on the stack, where a
		 double is doubleword aligned (skip an odd word).  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX-style ABIs: arguments consume consecutive stack words.
	 On 32-bit targets a 64-bit-aligned argument may first skip an
	 odd word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP values also consume an FPR (two for TFmode long double).  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3084
3085/* Determine where to put an argument to a function.
3086   Value is zero to push the argument on the stack,
3087   or a hard register in which to store the argument.
3088
3089   MODE is the argument's machine mode.
3090   TYPE is the data type of the argument (as a tree).
3091    This is null for libcalls where that information may
3092    not be available.
3093   CUM is a variable of type CUMULATIVE_ARGS which gives info about
3094    the preceding args and about the function being called.
3095   NAMED is nonzero if this argument is a named parameter
3096    (otherwise it is an extra parameter matching an ellipsis).
3097
3098   On RS/6000 the first eight words of non-FP are normally in registers
3099   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
3100   Under V.4, the first 8 FP args are in registers.
3101
3102   If this is floating-point and no prototype is specified, we use
3103   both an FP and integer register (or possibly FP reg and stack).  Library
3104   functions (when CALL_LIBCALL is set) always have the proper types for args,
3105   so we can pass the FP value just in one register.  emit_library_function
3106   doesn't support PARALLEL anyway.  */
3107
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    /* If fregno never advanced, no FP arg went in a register.  */
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* Named AltiVec vectors go in vector registers while any remain;
     otherwise they are pushed on the stack (NULL return).  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      /* V.4: SFmode/DFmode use the FP argument registers when
	 hard float is available.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  /* Describe the two SImode halves at byte offsets
		     0 and 4 within the vector.  */
		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX-style ABIs.  On 32-bit targets a 64-bit-aligned argument
	 may skip an odd word first.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable sized types are passed by reference (caller pushes
	 the pointer); nothing to return here.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: pass it in both an FPR and the GPRs
	     (or stack) so either kind of callee can find it.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3251
3252/* For an arg passed partly in registers and partly in memory,
3253   this is the number of registers used.
3254   For args passed entirely in registers or entirely in memory, zero.  */
3255
3256int
3257function_arg_partial_nregs (cum, mode, type, named)
3258     CUMULATIVE_ARGS *cum;
3259     enum machine_mode mode;
3260     tree type;
3261     int named ATTRIBUTE_UNUSED;
3262{
3263  if (DEFAULT_ABI == ABI_V4)
3264    return 0;
3265
3266  if (USE_FP_FOR_ARG_P (*cum, mode, type)
3267      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3268    {
3269      if (cum->nargs_prototype >= 0)
3270	return 0;
3271    }
3272
3273  if (cum->words < GP_ARG_NUM_REG
3274      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3275    {
3276      int ret = GP_ARG_NUM_REG - cum->words;
3277      if (ret && TARGET_DEBUG_ARG)
3278	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3279
3280      return ret;
3281    }
3282
3283  return 0;
3284}
3285
3286/* A C expression that indicates when an argument must be passed by
3287   reference.  If nonzero for an argument, a copy of that argument is
3288   made in memory and a pointer to the argument is passed instead of
3289   the argument itself.  The pointer is passed in whatever way is
3290   appropriate for passing a pointer to that type.
3291
3292   Under V.4, structures and unions are passed by reference.
3293
3294   As an extension to all ABIs, variable sized types are passed by
3295   reference.  */
3296
3297int
3298function_arg_pass_by_reference (cum, mode, type, named)
3299     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3300     enum machine_mode mode ATTRIBUTE_UNUSED;
3301     tree type;
3302     int named ATTRIBUTE_UNUSED;
3303{
3304  if (DEFAULT_ABI == ABI_V4
3305      && ((type && AGGREGATE_TYPE_P (type))
3306	  || mode == TFmode))
3307    {
3308      if (TARGET_DEBUG_ARG)
3309	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3310
3311      return 1;
3312    }
3313  return type && int_size_in_bytes (type) < 0;
3314}
3315
3316/* Perform any needed actions needed for a function that is receiving a
3317   variable number of arguments.
3318
3319   CUM is as above.
3320
3321   MODE and TYPE are the mode and type of the current parameter.
3322
3323   PRETEND_SIZE is a variable that should be set to the amount of stack
3324   that must be pushed by the prolog to pretend that our caller pushed
3325   it.
3326
3327   Normally, this macro will push all remaining incoming registers on the
3328   stack and set PRETEND_SIZE to the length of the registers pushed.  */
3329
3330void
3331setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3332     CUMULATIVE_ARGS *cum;
3333     enum machine_mode mode;
3334     tree type;
3335     int *pretend_size;
3336     int no_rtl;
3337
3338{
3339  CUMULATIVE_ARGS next_cum;
3340  int reg_size = TARGET_32BIT ? 4 : 8;
3341  rtx save_area = NULL_RTX, mem;
3342  int first_reg_offset, set;
3343  tree fntype;
3344  int stdarg_p;
3345
3346  fntype = TREE_TYPE (current_function_decl);
3347  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3348	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3349		  != void_type_node));
3350
3351  /* For varargs, we do not want to skip the dummy va_dcl argument.
3352     For stdargs, we do want to skip the last named argument.  */
3353  next_cum = *cum;
3354  if (stdarg_p)
3355    function_arg_advance (&next_cum, mode, type, 1);
3356
3357  if (DEFAULT_ABI == ABI_V4)
3358    {
3359      /* Indicate to allocate space on the stack for varargs save area.  */
3360      cfun->machine->sysv_varargs_p = 1;
3361      if (! no_rtl)
3362	save_area = plus_constant (virtual_stack_vars_rtx,
3363				   - RS6000_VARARGS_SIZE);
3364
3365      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3366    }
3367  else
3368    {
3369      first_reg_offset = next_cum.words;
3370      save_area = virtual_incoming_args_rtx;
3371      cfun->machine->sysv_varargs_p = 0;
3372
3373      if (MUST_PASS_IN_STACK (mode, type))
3374	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3375    }
3376
3377  set = get_varargs_alias_set ();
3378  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3379    {
3380      mem = gen_rtx_MEM (BLKmode,
3381		         plus_constant (save_area,
3382					first_reg_offset * reg_size)),
3383      set_mem_alias_set (mem, set);
3384      set_mem_align (mem, BITS_PER_WORD);
3385
3386      move_block_from_reg
3387	(GP_ARG_MIN_REG + first_reg_offset, mem,
3388	 GP_ARG_NUM_REG - first_reg_offset,
3389	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3390
3391      /* ??? Does ABI_V4 need this at all?  */
3392      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3393    }
3394
3395  /* Save FP registers if needed.  */
3396  if (DEFAULT_ABI == ABI_V4
3397      && TARGET_HARD_FLOAT && TARGET_FPRS
3398      && ! no_rtl
3399      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3400    {
3401      int fregno = next_cum.fregno;
3402      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3403      rtx lab = gen_label_rtx ();
3404      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3405
3406      emit_jump_insn (gen_rtx_SET (VOIDmode,
3407				   pc_rtx,
3408				   gen_rtx_IF_THEN_ELSE (VOIDmode,
3409					    gen_rtx_NE (VOIDmode, cr1,
3410						        const0_rtx),
3411					    gen_rtx_LABEL_REF (VOIDmode, lab),
3412					    pc_rtx)));
3413
3414      while (fregno <= FP_ARG_V4_MAX_REG)
3415	{
3416	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3417          set_mem_alias_set (mem, set);
3418	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3419	  fregno++;
3420	  off += 8;
3421	}
3422
3423      emit_label (lab);
3424    }
3425}
3426
3427/* Create the va_list data type.  */
3428
3429tree
3430rs6000_build_va_list ()
3431{
3432  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3433
3434  /* For AIX, prefer 'char *' because that's what the system
3435     header files like.  */
3436  if (DEFAULT_ABI != ABI_V4)
3437    return build_pointer_type (char_type_node);
3438
3439  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3440  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3441
3442  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3443		      unsigned_char_type_node);
3444  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3445		      unsigned_char_type_node);
3446  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3447		      ptr_type_node);
3448  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3449		      ptr_type_node);
3450
3451  DECL_FIELD_CONTEXT (f_gpr) = record;
3452  DECL_FIELD_CONTEXT (f_fpr) = record;
3453  DECL_FIELD_CONTEXT (f_ovf) = record;
3454  DECL_FIELD_CONTEXT (f_sav) = record;
3455
3456  TREE_CHAIN (record) = type_decl;
3457  TYPE_NAME (record) = type_decl;
3458  TYPE_FIELDS (record) = f_gpr;
3459  TREE_CHAIN (f_gpr) = f_fpr;
3460  TREE_CHAIN (f_fpr) = f_ovf;
3461  TREE_CHAIN (f_ovf) = f_sav;
3462
3463  layout_type (record);
3464
3465  /* The correct type is an array type of one element.  */
3466  return build_array_type (record, build_index_type (size_zero_node));
3467}
3468
3469/* Implement va_start.  */
3470
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Walk the field chain built by rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each field of the va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the register counters.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3539
3540/* Implement va_arg.  */
3541
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) < 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference the pointer fetched from the arg area.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: walk the field chain from rs6000_build_va_list and build
     COMPONENT_REFs for each va_list field.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which counter field it uses, how many
     registers it takes, and where its save area starts.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /*  AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      /* If the register counter is already at or past 8, fall
	 through to the overflow (stack) path.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Compute save_area + sav_ofs + reg * sav_scale, bumping the
	 counter past the registers consumed as a side effect.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* By-reference argument: load the real address through the pointer
     we just computed.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3726
3727/* Builtins.  */
3728
/* Register builtin NAME (with function type TYPE and builtin code
   CODE) with the front end, but only when the target_flags bits in
   MASK are enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3735
3736/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3737
/* Each entry gives the target_flags mask under which the builtin is
   available, the insn code it expands to, its source-level name, and
   its builtin enum code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3764
3765/* DST operations: void foo (void *, const int, const char).  */
3766
/* Data-stream touch builtins; same entry layout as bdesc_3arg:
   mask, insn code, builtin name, builtin enum code.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3774
3775/* Simple binary operations: VECc = foo (VECa, VECb).  */
3776
/* Table of two-operand builtins.  Entry layout: { target flag mask,
   insn pattern to emit, builtin function name, builtin code }.
   SPE entries use a zero mask.
   NOTE(review): the "place holder ... leave as first/last" comments
   below indicate the SPE portion is apparently walked as a positional
   range elsewhere in this file -- do not reorder entries or insert new
   ones outside the marked boundaries without checking the expanders.  */
3777static struct builtin_description bdesc_2arg[] =
3778{
  /* AltiVec binary builtins (all gated on MASK_ALTIVEC).  */
3779  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3780  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3781  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3782  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3783  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3784  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3785  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3786  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3787  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3788  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3789  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3790  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3791  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3792  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3793  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3794  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3795  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3796  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3797  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3798  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3799  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3800  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3801  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3802  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3803  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3804  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3805  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3806  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3807  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3808  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3809  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3810  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3811  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3812  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3813  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3814  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3815  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3816  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3817  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3818  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3819  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3820  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3821  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3822  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3823  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3824  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3825  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3826  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3827  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3828  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3829  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3830  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3831  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3832  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3833  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3834  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3835  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3836  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3837  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3838  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3839  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3840  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3841  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3842  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3843  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3844  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3845  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3846  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3847  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3848  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3849  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3850  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3851  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3852  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3853  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3854  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3855  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3856  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3857  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3858  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3859  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3860  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3861  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3862  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3863  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3864  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3865  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3866  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3867  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3868  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3869  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3870  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3871  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3872  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3873  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3874  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3875  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3876  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3877  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3878  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3879  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3880  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3881  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3882  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3883  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3884  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3885  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3886  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3887  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3888  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3889  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3890  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3891  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3892
3893  /* Place holder, leave as first spe builtin.  */
3894  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3895  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3896  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3897  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3898  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3899  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3900  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3901  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3902  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3903  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3904  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3905  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3906  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3907  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3908  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3909  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3910  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3911  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3912  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3913  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3914  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3915  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3916  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3917  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3918  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3919  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3920  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3921  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3922  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3923  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3924  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3925  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3926  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3927  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3928  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3929  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3930  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3931  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3932  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3933  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3934  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3935  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3936  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3937  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3938  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3939  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3940  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3941  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3942  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3943  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3944  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3945  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3946  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3947  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3948  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3949  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3950  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3951  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3952  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3953  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3954  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3955  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3956  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3957  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3958  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3959  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3960  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3961  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3962  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3963  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3964  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3965  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3966  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3967  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3968  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3969  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3970  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3971  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3972  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3973  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3974  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3975  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3976  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3977  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3978  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3979  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3980  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3981  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3982  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3983  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3984  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3985  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3986  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3987  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3988  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3989  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3990  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3991  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3992  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3993  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3994  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3995  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3996  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3997  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3998  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3999  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4000  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4001  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4002  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4003
4004  /* SPE binary operations expecting a 5-bit unsigned literal.  */
4005  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4006
4007  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4008  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4009  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4010  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4011  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4012  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4013  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4014  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4015  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4016  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4017  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4018  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4019  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4020  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4021  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4022  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4023  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4024  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4025  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4026  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4027  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4028  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4029  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4030  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4031  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4032  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4033
4034  /* Place-holder.  Leave as last binary SPE builtin.  */
4035  { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4036};
4037
4038/* AltiVec predicates.  */
4039
/* Description record for a predicate builtin: like builtin_description
   (see the other bdesc_* tables), plus an extra OPCODE string naming the
   specific compare insn to use with the generic predicate pattern.  */
4040struct builtin_description_predicates
4041{
  /* Target flag mask (e.g. MASK_ALTIVEC) required for this builtin.  */
4042  const unsigned int mask;
  /* Generic predicate insn pattern to expand through.  */
4043  const enum insn_code icode;
  /* Name string of the underlying compare insn (e.g. "*vcmpbfp.").  */
4044  const char *opcode;
  /* User-visible builtin function name.  */
4045  const char *const name;
  /* Builtin function code.  */
4046  const enum rs6000_builtins code;
4047};
4048
/* Table of AltiVec predicate builtins.  Each entry routes a builtin
   through the mode-specific altivec_predicate_* pattern, passing the
   named "*vcmp...." compare insn as the actual comparison to perform.
   All entries require MASK_ALTIVEC.  */
4049static const struct builtin_description_predicates bdesc_altivec_preds[] =
4050{
4051  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4052  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4053  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4054  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4055  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4056  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4057  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4058  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4059  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4060  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4061  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4062  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4063  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4064};
4065
4066/* SPE predicates.  */
/* Table of SPE predicate builtins.  Entry layout matches the other
   bdesc_* tables: { mask, insn pattern, builtin name, builtin code }.
   NOTE(review): the first/last place-holder comments indicate this table
   is apparently range-scanned positionally -- keep EVCMPEQ first and
   EVFSTSTLT last.  */
4067static struct builtin_description bdesc_spe_predicates[] =
4068{
4069  /* Place-holder.  Leave as first.  */
4070  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4071  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4072  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4073  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4074  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4075  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4076  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4077  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4078  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4079  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4080  /* Place-holder.  Leave as last.  */
4081  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4082};
4083
4084/* SPE evsel predicates.  */
/* Table of SPE evsel builtins.  Each entry pairs an SPE compare insn
   pattern with a __builtin_spe_evsel_* name and EVSEL builtin code.
   NOTE(review): as with bdesc_spe_predicates, the place-holder comments
   indicate positional range scanning -- keep EVSEL_CMPGTS first and
   EVSEL_FSTSTEQ last.  */
4085static struct builtin_description bdesc_spe_evsel[] =
4086{
4087  /* Place-holder.  Leave as first.  */
4088  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4089  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4090  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4091  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4092  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4093  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4094  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4095  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4096  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4097  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4098  /* Place-holder.  Leave as last.  */
4099  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4100};
4101
4102/* ABS* operations.  */
4103
/* Table of AltiVec absolute-value builtins: the generic absv*2 patterns
   plus the saturating abss variants.  Entry layout: { target flag mask,
   insn pattern, builtin name, builtin code }.  All require MASK_ALTIVEC.  */
4104static const struct builtin_description bdesc_abs[] =
4105{
4106  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4107  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4108  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4109  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4110  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4111  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4112  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4113};
4114
4115/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4116   foo (VECa).  */
4117
/* Table of one-operand builtins.  Entry layout: { target flag mask,
   insn pattern, builtin name, builtin code }.  AltiVec entries are
   gated on MASK_ALTIVEC; SPE entries use a zero mask.
   NOTE(review): the comment below states the SPE portion must span
   EVABS..EVSUBFUSIAAW, which indicates a positional range scan -- do
   not reorder entries across those boundaries.  */
4118static struct builtin_description bdesc_1arg[] =
4119{
4120  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4121  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4122  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4123  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4124  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4125  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4126  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4127  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4128  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4129  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4130  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4131  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4132  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4133  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4134  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4135  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4136  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4137
4138  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4139     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
4140  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4141  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4142  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4143  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4144  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4145  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4146  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4147  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4148  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4149  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4150  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4151  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4152  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4153  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4154  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4155  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4156  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4157  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4158  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4159  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4160  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4161  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4162  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4163  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4164  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4165  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4166  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4167  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4168  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4169  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4170
4171  /* Place-holder.  Leave as last unary SPE builtin.  */
4172  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4173};
4174
4175static rtx
4176rs6000_expand_unop_builtin (icode, arglist, target)
4177     enum insn_code icode;
4178     tree arglist;
4179     rtx target;
4180{
4181  rtx pat;
4182  tree arg0 = TREE_VALUE (arglist);
4183  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4184  enum machine_mode tmode = insn_data[icode].operand[0].mode;
4185  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4186
4187  if (icode == CODE_FOR_nothing)
4188    /* Builtin not supported on this processor.  */
4189    return 0;
4190
4191  /* If we got invalid arguments bail out before generating bad rtl.  */
4192  if (arg0 == error_mark_node)
4193    return const0_rtx;
4194
4195  if (icode == CODE_FOR_altivec_vspltisb
4196      || icode == CODE_FOR_altivec_vspltish
4197      || icode == CODE_FOR_altivec_vspltisw
4198      || icode == CODE_FOR_spe_evsplatfi
4199      || icode == CODE_FOR_spe_evsplati)
4200    {
4201      /* Only allow 5-bit *signed* literals.  */
4202      if (GET_CODE (op0) != CONST_INT
4203	  || INTVAL (op0) > 0x1f
4204	  || INTVAL (op0) < -0x1f)
4205	{
4206	  error ("argument 1 must be a 5-bit signed literal");
4207	  return const0_rtx;
4208	}
4209    }
4210
4211  if (target == 0
4212      || GET_MODE (target) != tmode
4213      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4214    target = gen_reg_rtx (tmode);
4215
4216  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4217    op0 = copy_to_mode_reg (mode0, op0);
4218
4219  pat = GEN_FCN (icode) (target, op0);
4220  if (! pat)
4221    return 0;
4222  emit_insn (pat);
4223
4224  return target;
4225}
4226
4227static rtx
4228altivec_expand_abs_builtin (icode, arglist, target)
4229     enum insn_code icode;
4230     tree arglist;
4231     rtx target;
4232{
4233  rtx pat, scratch1, scratch2;
4234  tree arg0 = TREE_VALUE (arglist);
4235  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4236  enum machine_mode tmode = insn_data[icode].operand[0].mode;
4237  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4238
4239  /* If we have invalid arguments, bail out before generating bad rtl.  */
4240  if (arg0 == error_mark_node)
4241    return const0_rtx;
4242
4243  if (target == 0
4244      || GET_MODE (target) != tmode
4245      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4246    target = gen_reg_rtx (tmode);
4247
4248  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4249    op0 = copy_to_mode_reg (mode0, op0);
4250
4251  scratch1 = gen_reg_rtx (mode0);
4252  scratch2 = gen_reg_rtx (mode0);
4253
4254  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4255  if (! pat)
4256    return 0;
4257  emit_insn (pat);
4258
4259  return target;
4260}
4261
4262static rtx
4263rs6000_expand_binop_builtin (icode, arglist, target)
4264     enum insn_code icode;
4265     tree arglist;
4266     rtx target;
4267{
4268  rtx pat;
4269  tree arg0 = TREE_VALUE (arglist);
4270  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4271  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4272  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4273  enum machine_mode tmode = insn_data[icode].operand[0].mode;
4274  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4275  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4276
4277  if (icode == CODE_FOR_nothing)
4278    /* Builtin not supported on this processor.  */
4279    return 0;
4280
4281  /* If we got invalid arguments bail out before generating bad rtl.  */
4282  if (arg0 == error_mark_node || arg1 == error_mark_node)
4283    return const0_rtx;
4284
4285  if (icode == CODE_FOR_altivec_vcfux
4286      || icode == CODE_FOR_altivec_vcfsx
4287      || icode == CODE_FOR_altivec_vctsxs
4288      || icode == CODE_FOR_altivec_vctuxs
4289      || icode == CODE_FOR_altivec_vspltb
4290      || icode == CODE_FOR_altivec_vsplth
4291      || icode == CODE_FOR_altivec_vspltw
4292      || icode == CODE_FOR_spe_evaddiw
4293      || icode == CODE_FOR_spe_evldd
4294      || icode == CODE_FOR_spe_evldh
4295      || icode == CODE_FOR_spe_evldw
4296      || icode == CODE_FOR_spe_evlhhesplat
4297      || icode == CODE_FOR_spe_evlhhossplat
4298      || icode == CODE_FOR_spe_evlhhousplat
4299      || icode == CODE_FOR_spe_evlwhe
4300      || icode == CODE_FOR_spe_evlwhos
4301      || icode == CODE_FOR_spe_evlwhou
4302      || icode == CODE_FOR_spe_evlwhsplat
4303      || icode == CODE_FOR_spe_evlwwsplat
4304      || icode == CODE_FOR_spe_evrlwi
4305      || icode == CODE_FOR_spe_evslwi
4306      || icode == CODE_FOR_spe_evsrwis
4307      || icode == CODE_FOR_spe_evsrwiu)
4308    {
4309      /* Only allow 5-bit unsigned literals.  */
4310      if (TREE_CODE (arg1) != INTEGER_CST
4311	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
4312	{
4313	  error ("argument 2 must be a 5-bit unsigned literal");
4314	  return const0_rtx;
4315	}
4316    }
4317
4318  if (target == 0
4319      || GET_MODE (target) != tmode
4320      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4321    target = gen_reg_rtx (tmode);
4322
4323  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4324    op0 = copy_to_mode_reg (mode0, op0);
4325  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4326    op1 = copy_to_mode_reg (mode1, op1);
4327
4328  pat = GEN_FCN (icode) (target, op0, op1);
4329  if (! pat)
4330    return 0;
4331  emit_insn (pat);
4332
4333  return target;
4334}
4335
/* Expand an AltiVec predicate builtin.  ICODE is the vector compare
   pattern to emit; OPCODE is an opcode string that is passed into the
   pattern wrapped in a SYMBOL_REF.  ARGLIST holds (cr6-form, arg0,
   arg1), where cr6-form (0..3) selects which combination of CR6 bits
   becomes the SImode result in TARGET.  Returns the result rtx, 0 if
   the insn could not be generated, or const0_rtx on bad input.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both compare operands are expected to share one mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector result of the compare is discarded; only the CR6 bits
     it sets are of interest.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

     switch (cr6_form_int)
       {
       case 0:
	 emit_insn (gen_cr6_test_for_zero (target));
	 break;
       case 1:
	 emit_insn (gen_cr6_test_for_zero_reverse (target));
	 break;
       case 2:
	 emit_insn (gen_cr6_test_for_lt (target));
	 break;
       case 3:
	 emit_insn (gen_cr6_test_for_lt_reverse (target));
	 break;
       default:
	 error ("argument 1 of __builtin_altivec_predicate is out of range");
	 break;
       }

  return target;
}
4416
4417static rtx
4418altivec_expand_stv_builtin (icode, arglist)
4419     enum insn_code icode;
4420     tree arglist;
4421{
4422  tree arg0 = TREE_VALUE (arglist);
4423  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4424  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4425  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4426  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4427  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4428  rtx pat;
4429  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4430  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4431  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4432
4433  /* Invalid arguments.  Bail before doing anything stoopid!  */
4434  if (arg0 == error_mark_node
4435      || arg1 == error_mark_node
4436      || arg2 == error_mark_node)
4437    return const0_rtx;
4438
4439  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4440    op0 = copy_to_mode_reg (mode2, op0);
4441  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4442    op1 = copy_to_mode_reg (mode0, op1);
4443  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4444    op2 = copy_to_mode_reg (mode1, op2);
4445
4446  pat = GEN_FCN (icode) (op1, op2, op0);
4447  if (pat)
4448    emit_insn (pat);
4449  return NULL_RTX;
4450}
4451
4452static rtx
4453rs6000_expand_ternop_builtin (icode, arglist, target)
4454     enum insn_code icode;
4455     tree arglist;
4456     rtx target;
4457{
4458  rtx pat;
4459  tree arg0 = TREE_VALUE (arglist);
4460  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4461  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4462  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4463  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4464  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4465  enum machine_mode tmode = insn_data[icode].operand[0].mode;
4466  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4467  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4468  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4469
4470  if (icode == CODE_FOR_nothing)
4471    /* Builtin not supported on this processor.  */
4472    return 0;
4473
4474  /* If we got invalid arguments bail out before generating bad rtl.  */
4475  if (arg0 == error_mark_node
4476      || arg1 == error_mark_node
4477      || arg2 == error_mark_node)
4478    return const0_rtx;
4479
4480  if (icode == CODE_FOR_altivec_vsldoi_4sf
4481      || icode == CODE_FOR_altivec_vsldoi_4si
4482      || icode == CODE_FOR_altivec_vsldoi_8hi
4483      || icode == CODE_FOR_altivec_vsldoi_16qi)
4484    {
4485      /* Only allow 4-bit unsigned literals.  */
4486      if (TREE_CODE (arg2) != INTEGER_CST
4487	  || TREE_INT_CST_LOW (arg2) & ~0xf)
4488	{
4489	  error ("argument 3 must be a 4-bit unsigned literal");
4490	  return const0_rtx;
4491	}
4492    }
4493
4494  if (target == 0
4495      || GET_MODE (target) != tmode
4496      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4497    target = gen_reg_rtx (tmode);
4498
4499  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4500    op0 = copy_to_mode_reg (mode0, op0);
4501  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4502    op1 = copy_to_mode_reg (mode1, op1);
4503  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4504    op2 = copy_to_mode_reg (mode2, op2);
4505
4506  pat = GEN_FCN (icode) (target, op0, op1, op2);
4507  if (! pat)
4508    return 0;
4509  emit_insn (pat);
4510
4511  return target;
4512}
4513
4514/* Expand the lvx builtins.  */
4515static rtx
4516altivec_expand_ld_builtin (exp, target, expandedp)
4517     tree exp;
4518     rtx target;
4519     bool *expandedp;
4520{
4521  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4522  tree arglist = TREE_OPERAND (exp, 1);
4523  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4524  tree arg0;
4525  enum machine_mode tmode, mode0;
4526  rtx pat, op0;
4527  enum insn_code icode;
4528
4529  switch (fcode)
4530    {
4531    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4532      icode = CODE_FOR_altivec_lvx_16qi;
4533      break;
4534    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4535      icode = CODE_FOR_altivec_lvx_8hi;
4536      break;
4537    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4538      icode = CODE_FOR_altivec_lvx_4si;
4539      break;
4540    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4541      icode = CODE_FOR_altivec_lvx_4sf;
4542      break;
4543    default:
4544      *expandedp = false;
4545      return NULL_RTX;
4546    }
4547
4548  *expandedp = true;
4549
4550  arg0 = TREE_VALUE (arglist);
4551  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4552  tmode = insn_data[icode].operand[0].mode;
4553  mode0 = insn_data[icode].operand[1].mode;
4554
4555  if (target == 0
4556      || GET_MODE (target) != tmode
4557      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4558    target = gen_reg_rtx (tmode);
4559
4560  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4561    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4562
4563  pat = GEN_FCN (icode) (target, op0);
4564  if (! pat)
4565    return 0;
4566  emit_insn (pat);
4567  return target;
4568}
4569
4570/* Expand the stvx builtins.  */
4571static rtx
4572altivec_expand_st_builtin (exp, target, expandedp)
4573     tree exp;
4574     rtx target ATTRIBUTE_UNUSED;
4575     bool *expandedp;
4576{
4577  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4578  tree arglist = TREE_OPERAND (exp, 1);
4579  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4580  tree arg0, arg1;
4581  enum machine_mode mode0, mode1;
4582  rtx pat, op0, op1;
4583  enum insn_code icode;
4584
4585  switch (fcode)
4586    {
4587    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4588      icode = CODE_FOR_altivec_stvx_16qi;
4589      break;
4590    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4591      icode = CODE_FOR_altivec_stvx_8hi;
4592      break;
4593    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4594      icode = CODE_FOR_altivec_stvx_4si;
4595      break;
4596    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4597      icode = CODE_FOR_altivec_stvx_4sf;
4598      break;
4599    default:
4600      *expandedp = false;
4601      return NULL_RTX;
4602    }
4603
4604  arg0 = TREE_VALUE (arglist);
4605  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4606  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4607  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4608  mode0 = insn_data[icode].operand[0].mode;
4609  mode1 = insn_data[icode].operand[1].mode;
4610
4611  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4612    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4613  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4614    op1 = copy_to_mode_reg (mode1, op1);
4615
4616  pat = GEN_FCN (icode) (op0, op1);
4617  if (pat)
4618    emit_insn (pat);
4619
4620  *expandedp = true;
4621  return NULL_RTX;
4622}
4623
/* Expand the dst builtins (AltiVec data stream touch).  Scans
   bdesc_dst for an entry matching EXP's function code; if found,
   emits the insn, sets *EXPANDEDP to true and returns NULL_RTX (these
   builtins produce no value, so TARGET is unused).  Otherwise leaves
   *EXPANDEDP false.  */
static rtx
altivec_expand_dst_builtin (exp, target, expandedp)
     tree exp;
     rtx target ATTRIBUTE_UNUSED;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	/* The stream selector must be a literal fitting the insn's
	   2-bit field.  */
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (mode0, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	/* op2 is passed through unchecked; it was verified to be a
	   small constant just above.  */
	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
4685
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatch order: the ld, st and dst helpers get first crack; then
   the special cases handled inline here; then the abs and predicate
   tables; finally the LV* loads, which use the generic binop
   expander.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Try the three specialized expanders first; each reports via
     *EXPANDEDP whether it recognized the builtin.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from the vector status and control register; a nullary
	 builtin producing a value.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to the vector status and control register; consumes one
	 operand, produces no value.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      /* The stream selector must fit the insn's 2-bit field.  */
      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Nothing recognized the builtin.  */
  *expandedp = false;
  return NULL_RTX;
}
4837
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  Each entry is
   (target mask, insn code, user-visible name, builtin code).  */
static struct builtin_description bdesc_2arg_spe[] =
{
  /* Indexed (register + register) SPE load forms.  */
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  /* Register + immediate SPE load forms; the immediate is checked in
     rs6000_expand_binop_builtin.  */
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4865
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations: the manually-tabled binops, the SPE predicates, the
   evsel builtins, the stores, and the SPEFSCR moves.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument of these stores is the immediate.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Table-driven binops, predicates and evsel forms.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* Stores reuse the AltiVec three-operand store expander.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Nullary read of the SPEFSCR.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write to the SPEFSCR; no value produced.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
4993
/* Expand an SPE predicate builtin.  ICODE is the compare pattern;
   ARGLIST holds (form, arg0, arg1), where form (0..3) selects which
   CR bit of the compare becomes the SImode result in TARGET (see the
   big comment below).  Returns the result rtx, or const0_rtx on bad
   input or when the insn could not be generated.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be a compile-time constant.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands are expected to share one mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare writes a condition register; we read its bits below.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the selected CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5095
5096/* The evsel builtins look like this:
5097
5098     e = __builtin_spe_evsel_OP (a, b, c, d);
5099
5100   and work like this:
5101
5102     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5103     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5104*/
5105
/* Expand one of the __builtin_spe_evsel_* builtins described in the
   comment above.  ICODE is the comparison insn used to set the
   condition register, ARGLIST holds the four vector arguments, and
   TARGET is a suggested destination which may be zero or unsuitable.
   Returns the result rtx, or const0_rtx on erroneous arguments.  */
static rtx
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* The two comparison operands of ICODE must share one mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  /* Reuse TARGET only when it matches the comparison's vector mode
     and satisfies the destination operand's predicate.  */
  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* NOTE(review): operand[1]'s predicate is applied to op1..op3 as
     well as op0.  For op1 this presumably matches operand[2]'s
     predicate since mode0 == mode1 was just asserted, and op2/op3
     feed the separate spe_evsel pattern rather than ICODE — confirm
     against the machine description.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 per element, using the integer or
     floating-point evsel pattern as the mode dictates.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
5159
5160/* Expand an expression EXP that calls a built-in function,
5161   with result going to TARGET if that's convenient
5162   (and in mode MODE if that's convenient).
5163   SUBTARGET may be used as the target for computing one of EXP's operands.
5164   IGNORE is nonzero if the value is to be ignored.  */
5165
5166static rtx
5167rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5168     tree exp;
5169     rtx target;
5170     rtx subtarget ATTRIBUTE_UNUSED;
5171     enum machine_mode mode ATTRIBUTE_UNUSED;
5172     int ignore ATTRIBUTE_UNUSED;
5173{
5174  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5175  tree arglist = TREE_OPERAND (exp, 1);
5176  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5177  struct builtin_description *d;
5178  size_t i;
5179  rtx ret;
5180  bool success;
5181
5182  if (TARGET_ALTIVEC)
5183    {
5184      ret = altivec_expand_builtin (exp, target, &success);
5185
5186      if (success)
5187	return ret;
5188    }
5189  if (TARGET_SPE)
5190    {
5191      ret = spe_expand_builtin (exp, target, &success);
5192
5193      if (success)
5194	return ret;
5195    }
5196
5197  if (TARGET_ALTIVEC || TARGET_SPE)
5198    {
5199      /* Handle simple unary operations.  */
5200      d = (struct builtin_description *) bdesc_1arg;
5201      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5202	if (d->code == fcode)
5203	  return rs6000_expand_unop_builtin (d->icode, arglist, target);
5204
5205      /* Handle simple binary operations.  */
5206      d = (struct builtin_description *) bdesc_2arg;
5207      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5208	if (d->code == fcode)
5209	  return rs6000_expand_binop_builtin (d->icode, arglist, target);
5210
5211      /* Handle simple ternary operations.  */
5212      d = (struct builtin_description *) bdesc_3arg;
5213      for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
5214	if (d->code == fcode)
5215	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5216    }
5217
5218  abort ();
5219  return NULL_RTX;
5220}
5221
5222static void
5223rs6000_init_builtins ()
5224{
5225  if (TARGET_SPE)
5226    spe_init_builtins ();
5227  if (TARGET_ALTIVEC)
5228    altivec_init_builtins ();
5229  if (TARGET_ALTIVEC || TARGET_SPE)
5230    rs6000_common_init_builtins ();
5231}
5232
5233/* Search through a set of builtins and enable the mask bits.
5234   DESC is an array of builtins.
5235   SIZE is the totaly number of builtins.
5236   START is the builtin enum at which to start.
5237   END is the builtin enum at which to end.  */
5238static void
5239enable_mask_for_builtins (desc, size, start, end)
5240     struct builtin_description *desc;
5241     int size;
5242     enum rs6000_builtins start, end;
5243{
5244  int i;
5245
5246  for (i = 0; i < size; ++i)
5247    if (desc[i].code == start)
5248      break;
5249
5250  if (i == size)
5251    return;
5252
5253  for (; i < size; ++i)
5254    {
5255      /* Flip all the bits on.  */
5256      desc[i].mask = target_flags;
5257      if (desc[i].code == end)
5258	break;
5259    }
5260}
5261
/* Register the SPE-specific builtins: the irregular SPEFSCR, store
   and load builtins, plus the predicate and evsel builtins whose
   function types depend on each insn's operand mode.  The simple
   unary/binary operators live in rs6000_common_init_builtins.  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (V2SI_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types used by the irregular builtins registered below.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (V2SI_type_node,
     tree_cons (NULL_TREE, V2SI_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (V2SF_type_node,
     tree_cons (NULL_TREE, V2SF_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      tree_cons (NULL_TREE, V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  tree v2si_ftype_pv2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  Each takes the function type matching the vector
     mode of its comparison insn's first input operand.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Same mode-driven type selection as above.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5471
/* Register the AltiVec-specific builtins: the internal load/store
   helpers, VSCR and data-stream controls, element load/stores, the
   DST variants, the comparison predicates and the abs* operators.
   The simple operator tables are handled by
   rs6000_common_init_builtins.  */
static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified pointer types for the load builtins, which read
     but never write through their pointer argument.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function types used by the builtins registered below.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  Each predicate's function type is
     chosen to match the vector mode of its insn's first input.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  Here the function type is keyed
     off the output operand's mode instead.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5670
5671static void
5672rs6000_common_init_builtins ()
5673{
5674  struct builtin_description *d;
5675  size_t i;
5676
5677  tree v4sf_ftype_v4sf_v4sf_v16qi
5678    = build_function_type_list (V4SF_type_node,
5679				V4SF_type_node, V4SF_type_node,
5680				V16QI_type_node, NULL_TREE);
5681  tree v4si_ftype_v4si_v4si_v16qi
5682    = build_function_type_list (V4SI_type_node,
5683				V4SI_type_node, V4SI_type_node,
5684				V16QI_type_node, NULL_TREE);
5685  tree v8hi_ftype_v8hi_v8hi_v16qi
5686    = build_function_type_list (V8HI_type_node,
5687				V8HI_type_node, V8HI_type_node,
5688				V16QI_type_node, NULL_TREE);
5689  tree v16qi_ftype_v16qi_v16qi_v16qi
5690    = build_function_type_list (V16QI_type_node,
5691				V16QI_type_node, V16QI_type_node,
5692				V16QI_type_node, NULL_TREE);
5693  tree v4si_ftype_char
5694    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5695  tree v8hi_ftype_char
5696    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5697  tree v16qi_ftype_char
5698    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5699  tree v8hi_ftype_v16qi
5700    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5701  tree v4sf_ftype_v4sf
5702    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5703
5704  tree v2si_ftype_v2si_v2si
5705    = build_function_type_list (V2SI_type_node,
5706				V2SI_type_node, V2SI_type_node, NULL_TREE);
5707
5708  tree v2sf_ftype_v2sf_v2sf
5709    = build_function_type_list (V2SF_type_node,
5710				V2SF_type_node, V2SF_type_node, NULL_TREE);
5711
5712  tree v2si_ftype_int_int
5713    = build_function_type_list (V2SI_type_node,
5714				integer_type_node, integer_type_node,
5715				NULL_TREE);
5716
5717  tree v2si_ftype_v2si
5718    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5719
5720  tree v2sf_ftype_v2sf
5721    = build_function_type_list (V2SF_type_node,
5722				V2SF_type_node, NULL_TREE);
5723
5724  tree v2sf_ftype_v2si
5725    = build_function_type_list (V2SF_type_node,
5726				V2SI_type_node, NULL_TREE);
5727
5728  tree v2si_ftype_v2sf
5729    = build_function_type_list (V2SI_type_node,
5730				V2SF_type_node, NULL_TREE);
5731
5732  tree v2si_ftype_v2si_char
5733    = build_function_type_list (V2SI_type_node,
5734				V2SI_type_node, char_type_node, NULL_TREE);
5735
5736  tree v2si_ftype_int_char
5737    = build_function_type_list (V2SI_type_node,
5738				integer_type_node, char_type_node, NULL_TREE);
5739
5740  tree v2si_ftype_char
5741    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5742
5743  tree int_ftype_int_int
5744    = build_function_type_list (integer_type_node,
5745				integer_type_node, integer_type_node,
5746				NULL_TREE);
5747
5748  tree v4si_ftype_v4si_v4si
5749    = build_function_type_list (V4SI_type_node,
5750				V4SI_type_node, V4SI_type_node, NULL_TREE);
5751  tree v4sf_ftype_v4si_char
5752    = build_function_type_list (V4SF_type_node,
5753				V4SI_type_node, char_type_node, NULL_TREE);
5754  tree v4si_ftype_v4sf_char
5755    = build_function_type_list (V4SI_type_node,
5756				V4SF_type_node, char_type_node, NULL_TREE);
5757  tree v4si_ftype_v4si_char
5758    = build_function_type_list (V4SI_type_node,
5759				V4SI_type_node, char_type_node, NULL_TREE);
5760  tree v8hi_ftype_v8hi_char
5761    = build_function_type_list (V8HI_type_node,
5762				V8HI_type_node, char_type_node, NULL_TREE);
5763  tree v16qi_ftype_v16qi_char
5764    = build_function_type_list (V16QI_type_node,
5765				V16QI_type_node, char_type_node, NULL_TREE);
5766  tree v16qi_ftype_v16qi_v16qi_char
5767    = build_function_type_list (V16QI_type_node,
5768				V16QI_type_node, V16QI_type_node,
5769				char_type_node, NULL_TREE);
5770  tree v8hi_ftype_v8hi_v8hi_char
5771    = build_function_type_list (V8HI_type_node,
5772				V8HI_type_node, V8HI_type_node,
5773				char_type_node, NULL_TREE);
5774  tree v4si_ftype_v4si_v4si_char
5775    = build_function_type_list (V4SI_type_node,
5776				V4SI_type_node, V4SI_type_node,
5777				char_type_node, NULL_TREE);
5778  tree v4sf_ftype_v4sf_v4sf_char
5779    = build_function_type_list (V4SF_type_node,
5780				V4SF_type_node, V4SF_type_node,
5781				char_type_node, NULL_TREE);
5782  tree v4sf_ftype_v4sf_v4sf
5783    = build_function_type_list (V4SF_type_node,
5784				V4SF_type_node, V4SF_type_node, NULL_TREE);
5785  tree v4sf_ftype_v4sf_v4sf_v4si
5786    = build_function_type_list (V4SF_type_node,
5787				V4SF_type_node, V4SF_type_node,
5788				V4SI_type_node, NULL_TREE);
5789  tree v4sf_ftype_v4sf_v4sf_v4sf
5790    = build_function_type_list (V4SF_type_node,
5791				V4SF_type_node, V4SF_type_node,
5792				V4SF_type_node, NULL_TREE);
5793  tree v4si_ftype_v4si_v4si_v4si
5794    = build_function_type_list (V4SI_type_node,
5795				V4SI_type_node, V4SI_type_node,
5796				V4SI_type_node, NULL_TREE);
5797  tree v8hi_ftype_v8hi_v8hi
5798    = build_function_type_list (V8HI_type_node,
5799				V8HI_type_node, V8HI_type_node, NULL_TREE);
5800  tree v8hi_ftype_v8hi_v8hi_v8hi
5801    = build_function_type_list (V8HI_type_node,
5802				V8HI_type_node, V8HI_type_node,
5803				V8HI_type_node, NULL_TREE);
5804 tree v4si_ftype_v8hi_v8hi_v4si
5805    = build_function_type_list (V4SI_type_node,
5806				V8HI_type_node, V8HI_type_node,
5807				V4SI_type_node, NULL_TREE);
5808 tree v4si_ftype_v16qi_v16qi_v4si
5809    = build_function_type_list (V4SI_type_node,
5810				V16QI_type_node, V16QI_type_node,
5811				V4SI_type_node, NULL_TREE);
5812  tree v16qi_ftype_v16qi_v16qi
5813    = build_function_type_list (V16QI_type_node,
5814				V16QI_type_node, V16QI_type_node, NULL_TREE);
5815  tree v4si_ftype_v4sf_v4sf
5816    = build_function_type_list (V4SI_type_node,
5817				V4SF_type_node, V4SF_type_node, NULL_TREE);
5818  tree v8hi_ftype_v16qi_v16qi
5819    = build_function_type_list (V8HI_type_node,
5820				V16QI_type_node, V16QI_type_node, NULL_TREE);
5821  tree v4si_ftype_v8hi_v8hi
5822    = build_function_type_list (V4SI_type_node,
5823				V8HI_type_node, V8HI_type_node, NULL_TREE);
5824  tree v8hi_ftype_v4si_v4si
5825    = build_function_type_list (V8HI_type_node,
5826				V4SI_type_node, V4SI_type_node, NULL_TREE);
5827  tree v16qi_ftype_v8hi_v8hi
5828    = build_function_type_list (V16QI_type_node,
5829				V8HI_type_node, V8HI_type_node, NULL_TREE);
5830  tree v4si_ftype_v16qi_v4si
5831    = build_function_type_list (V4SI_type_node,
5832				V16QI_type_node, V4SI_type_node, NULL_TREE);
5833  tree v4si_ftype_v16qi_v16qi
5834    = build_function_type_list (V4SI_type_node,
5835				V16QI_type_node, V16QI_type_node, NULL_TREE);
5836  tree v4si_ftype_v8hi_v4si
5837    = build_function_type_list (V4SI_type_node,
5838				V8HI_type_node, V4SI_type_node, NULL_TREE);
5839  tree v4si_ftype_v8hi
5840    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5841  tree int_ftype_v4si_v4si
5842    = build_function_type_list (integer_type_node,
5843				V4SI_type_node, V4SI_type_node, NULL_TREE);
5844  tree int_ftype_v4sf_v4sf
5845    = build_function_type_list (integer_type_node,
5846				V4SF_type_node, V4SF_type_node, NULL_TREE);
5847  tree int_ftype_v16qi_v16qi
5848    = build_function_type_list (integer_type_node,
5849				V16QI_type_node, V16QI_type_node, NULL_TREE);
5850  tree int_ftype_v8hi_v8hi
5851    = build_function_type_list (integer_type_node,
5852				V8HI_type_node, V8HI_type_node, NULL_TREE);
5853
5854  /* Add the simple ternary operators.  */
5855  d = (struct builtin_description *) bdesc_3arg;
5856  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5857    {
5858
5859      enum machine_mode mode0, mode1, mode2, mode3;
5860      tree type;
5861
5862      if (d->name == 0 || d->icode == CODE_FOR_nothing)
5863	continue;
5864
5865      mode0 = insn_data[d->icode].operand[0].mode;
5866      mode1 = insn_data[d->icode].operand[1].mode;
5867      mode2 = insn_data[d->icode].operand[2].mode;
5868      mode3 = insn_data[d->icode].operand[3].mode;
5869
5870      /* When all four are of the same mode.  */
5871      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5872	{
5873	  switch (mode0)
5874	    {
5875	    case V4SImode:
5876	      type = v4si_ftype_v4si_v4si_v4si;
5877	      break;
5878	    case V4SFmode:
5879	      type = v4sf_ftype_v4sf_v4sf_v4sf;
5880	      break;
5881	    case V8HImode:
5882	      type = v8hi_ftype_v8hi_v8hi_v8hi;
5883	      break;
5884	    case V16QImode:
5885	      type = v16qi_ftype_v16qi_v16qi_v16qi;
5886	      break;
5887	    default:
5888	      abort();
5889	    }
5890	}
5891      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5892        {
5893	  switch (mode0)
5894	    {
5895	    case V4SImode:
5896	      type = v4si_ftype_v4si_v4si_v16qi;
5897	      break;
5898	    case V4SFmode:
5899	      type = v4sf_ftype_v4sf_v4sf_v16qi;
5900	      break;
5901	    case V8HImode:
5902	      type = v8hi_ftype_v8hi_v8hi_v16qi;
5903	      break;
5904	    case V16QImode:
5905	      type = v16qi_ftype_v16qi_v16qi_v16qi;
5906	      break;
5907	    default:
5908	      abort();
5909	    }
5910	}
5911      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5912	       && mode3 == V4SImode)
5913	type = v4si_ftype_v16qi_v16qi_v4si;
5914      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5915	       && mode3 == V4SImode)
5916	type = v4si_ftype_v8hi_v8hi_v4si;
5917      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5918	       && mode3 == V4SImode)
5919	type = v4sf_ftype_v4sf_v4sf_v4si;
5920
5921      /* vchar, vchar, vchar, 4 bit literal.  */
5922      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5923	       && mode3 == QImode)
5924	type = v16qi_ftype_v16qi_v16qi_char;
5925
5926      /* vshort, vshort, vshort, 4 bit literal.  */
5927      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5928	       && mode3 == QImode)
5929	type = v8hi_ftype_v8hi_v8hi_char;
5930
5931      /* vint, vint, vint, 4 bit literal.  */
5932      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5933	       && mode3 == QImode)
5934	type = v4si_ftype_v4si_v4si_char;
5935
5936      /* vfloat, vfloat, vfloat, 4 bit literal.  */
5937      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5938	       && mode3 == QImode)
5939	type = v4sf_ftype_v4sf_v4sf_char;
5940
5941      else
5942	abort ();
5943
5944      def_builtin (d->mask, d->name, type, d->code);
5945    }
5946
5947  /* Add the simple binary operators.  */
5948  d = (struct builtin_description *) bdesc_2arg;
5949  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5950    {
5951      enum machine_mode mode0, mode1, mode2;
5952      tree type;
5953
5954      if (d->name == 0 || d->icode == CODE_FOR_nothing)
5955	continue;
5956
5957      mode0 = insn_data[d->icode].operand[0].mode;
5958      mode1 = insn_data[d->icode].operand[1].mode;
5959      mode2 = insn_data[d->icode].operand[2].mode;
5960
5961      /* When all three operands are of the same mode.  */
5962      if (mode0 == mode1 && mode1 == mode2)
5963	{
5964	  switch (mode0)
5965	    {
5966	    case V4SFmode:
5967	      type = v4sf_ftype_v4sf_v4sf;
5968	      break;
5969	    case V4SImode:
5970	      type = v4si_ftype_v4si_v4si;
5971	      break;
5972	    case V16QImode:
5973	      type = v16qi_ftype_v16qi_v16qi;
5974	      break;
5975	    case V8HImode:
5976	      type = v8hi_ftype_v8hi_v8hi;
5977	      break;
5978	    case V2SImode:
5979	      type = v2si_ftype_v2si_v2si;
5980	      break;
5981	    case V2SFmode:
5982	      type = v2sf_ftype_v2sf_v2sf;
5983	      break;
5984	    case SImode:
5985	      type = int_ftype_int_int;
5986	      break;
5987	    default:
5988	      abort ();
5989	    }
5990	}
5991
5992      /* A few other combos we really don't want to do manually.  */
5993
5994      /* vint, vfloat, vfloat.  */
5995      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5996	type = v4si_ftype_v4sf_v4sf;
5997
5998      /* vshort, vchar, vchar.  */
5999      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6000	type = v8hi_ftype_v16qi_v16qi;
6001
6002      /* vint, vshort, vshort.  */
6003      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6004	type = v4si_ftype_v8hi_v8hi;
6005
6006      /* vshort, vint, vint.  */
6007      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6008	type = v8hi_ftype_v4si_v4si;
6009
6010      /* vchar, vshort, vshort.  */
6011      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6012	type = v16qi_ftype_v8hi_v8hi;
6013
6014      /* vint, vchar, vint.  */
6015      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6016	type = v4si_ftype_v16qi_v4si;
6017
6018      /* vint, vchar, vchar.  */
6019      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6020	type = v4si_ftype_v16qi_v16qi;
6021
6022      /* vint, vshort, vint.  */
6023      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6024	type = v4si_ftype_v8hi_v4si;
6025
6026      /* vint, vint, 5 bit literal.  */
6027      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6028	type = v4si_ftype_v4si_char;
6029
6030      /* vshort, vshort, 5 bit literal.  */
6031      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6032	type = v8hi_ftype_v8hi_char;
6033
6034      /* vchar, vchar, 5 bit literal.  */
6035      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6036	type = v16qi_ftype_v16qi_char;
6037
6038      /* vfloat, vint, 5 bit literal.  */
6039      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6040	type = v4sf_ftype_v4si_char;
6041
6042      /* vint, vfloat, 5 bit literal.  */
6043      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6044	type = v4si_ftype_v4sf_char;
6045
6046      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6047	type = v2si_ftype_int_int;
6048
6049      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6050	type = v2si_ftype_v2si_char;
6051
6052      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6053	type = v2si_ftype_int_char;
6054
6055      /* int, x, x.  */
6056      else if (mode0 == SImode)
6057	{
6058	  switch (mode1)
6059	    {
6060	    case V4SImode:
6061	      type = int_ftype_v4si_v4si;
6062	      break;
6063	    case V4SFmode:
6064	      type = int_ftype_v4sf_v4sf;
6065	      break;
6066	    case V16QImode:
6067	      type = int_ftype_v16qi_v16qi;
6068	      break;
6069	    case V8HImode:
6070	      type = int_ftype_v8hi_v8hi;
6071	      break;
6072	    default:
6073	      abort ();
6074	    }
6075	}
6076
6077      else
6078	abort ();
6079
6080      def_builtin (d->mask, d->name, type, d->code);
6081    }
6082
6083  /* Add the simple unary operators.  */
6084  d = (struct builtin_description *) bdesc_1arg;
6085  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6086    {
6087      enum machine_mode mode0, mode1;
6088      tree type;
6089
6090      if (d->name == 0 || d->icode == CODE_FOR_nothing)
6091	continue;
6092
6093      mode0 = insn_data[d->icode].operand[0].mode;
6094      mode1 = insn_data[d->icode].operand[1].mode;
6095
6096      if (mode0 == V4SImode && mode1 == QImode)
6097        type = v4si_ftype_char;
6098      else if (mode0 == V8HImode && mode1 == QImode)
6099        type = v8hi_ftype_char;
6100      else if (mode0 == V16QImode && mode1 == QImode)
6101        type = v16qi_ftype_char;
6102      else if (mode0 == V4SFmode && mode1 == V4SFmode)
6103	type = v4sf_ftype_v4sf;
6104      else if (mode0 == V8HImode && mode1 == V16QImode)
6105	type = v8hi_ftype_v16qi;
6106      else if (mode0 == V4SImode && mode1 == V8HImode)
6107	type = v4si_ftype_v8hi;
6108      else if (mode0 == V2SImode && mode1 == V2SImode)
6109	type = v2si_ftype_v2si;
6110      else if (mode0 == V2SFmode && mode1 == V2SFmode)
6111	type = v2sf_ftype_v2sf;
6112      else if (mode0 == V2SFmode && mode1 == V2SImode)
6113	type = v2sf_ftype_v2si;
6114      else if (mode0 == V2SImode && mode1 == V2SFmode)
6115	type = v2si_ftype_v2sf;
6116      else if (mode0 == V2SImode && mode1 == QImode)
6117	type = v2si_ftype_char;
6118      else
6119	abort ();
6120
6121      def_builtin (d->mask, d->name, type, d->code);
6122    }
6123}
6124
6125
6126/* Expand a block move operation, and return 1 if successful.  Return 0
6127   if we should let the compiler generate normal code.
6128
6129   operands[0] is the destination
6130   operands[1] is the source
6131   operands[2] is the length
6132   operands[3] is the alignment */
6133
6134#define MAX_MOVE_REG 4
6135
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy.  */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort.  */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move?  */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* The two generator signatures used below: movstrsi patterns take
	     (dest, src, length, align); simple move patterns take
	     (dest, src).  MODE stays BLKmode when the movstrsi member is
	     the valid one, and is set to a scalar mode otherwise.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* Pick the widest move available for the remaining byte count,
	     provided the registers the multi-register string patterns
	     need (r5 upward) have not been fixed by the user.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      /* The length operand is masked to 5 bits; presumably a
		 count of 32 is encoded as 0 by the movstrsi patterns —
		 confirm against rs6000.md.  */
	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar move: load into a temporary, then store, so the
		 source and destination MEMs never appear in one insn.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Queue up to MAX_MOVE_REG stores so several loads can be
	 scheduled before their matching stores are emitted.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the queued stores once the buffer is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Emit any stores still pending.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6349
6350
6351/* Return 1 if OP is a load multiple operation.  It is known to be a
6352   PARALLEL and the first section will be tested.  */
6353
6354int
6355load_multiple_operation (op, mode)
6356     rtx op;
6357     enum machine_mode mode ATTRIBUTE_UNUSED;
6358{
6359  int count = XVECLEN (op, 0);
6360  unsigned int dest_regno;
6361  rtx src_addr;
6362  int i;
6363
6364  /* Perform a quick check so we don't blow up below.  */
6365  if (count <= 1
6366      || GET_CODE (XVECEXP (op, 0, 0)) != SET
6367      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6368      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6369    return 0;
6370
6371  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6372  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6373
6374  for (i = 1; i < count; i++)
6375    {
6376      rtx elt = XVECEXP (op, 0, i);
6377
6378      if (GET_CODE (elt) != SET
6379	  || GET_CODE (SET_DEST (elt)) != REG
6380	  || GET_MODE (SET_DEST (elt)) != SImode
6381	  || REGNO (SET_DEST (elt)) != dest_regno + i
6382	  || GET_CODE (SET_SRC (elt)) != MEM
6383	  || GET_MODE (SET_SRC (elt)) != SImode
6384	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6385	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6386	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6387	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6388	return 0;
6389    }
6390
6391  return 1;
6392}
6393
6394/* Similar, but tests for store multiple.  Here, the second vector element
6395   is a CLOBBER.  It will be tested later.  */
6396
6397int
6398store_multiple_operation (op, mode)
6399     rtx op;
6400     enum machine_mode mode ATTRIBUTE_UNUSED;
6401{
6402  int count = XVECLEN (op, 0) - 1;
6403  unsigned int src_regno;
6404  rtx dest_addr;
6405  int i;
6406
6407  /* Perform a quick check so we don't blow up below.  */
6408  if (count <= 1
6409      || GET_CODE (XVECEXP (op, 0, 0)) != SET
6410      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6411      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6412    return 0;
6413
6414  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6415  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6416
6417  for (i = 1; i < count; i++)
6418    {
6419      rtx elt = XVECEXP (op, 0, i + 1);
6420
6421      if (GET_CODE (elt) != SET
6422	  || GET_CODE (SET_SRC (elt)) != REG
6423	  || GET_MODE (SET_SRC (elt)) != SImode
6424	  || REGNO (SET_SRC (elt)) != src_regno + i
6425	  || GET_CODE (SET_DEST (elt)) != MEM
6426	  || GET_MODE (SET_DEST (elt)) != SImode
6427	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6428	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6429	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6430	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6431	return 0;
6432    }
6433
6434  return 1;
6435}
6436
6437/* Return a string to perform a load_multiple operation.
6438   operands[0] is the vector.
6439   operands[1] is the source address.
6440   operands[2] is the first destination register.  */
6441
const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];		/* only xop[0..2] are used below */

  /* A one-word "multiple" is just a plain load.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Scan the destinations for one that overlaps the address register.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address register is the LAST destination: lswi the
	       first words-1 registers, then load the final word into
	       the address register, clobbering it only at the end.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address register is the FIRST destination: bump the
	       address past word 0, lswi the remaining words into the
	       following registers, then load word 0 (at offset -4 from
	       the bumped address) into the address register last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address register is somewhere in the middle: emit one
	       lwz per word, doing the word that lands in the address
	       register last so the base address stays valid.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a single lswi does the whole transfer.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6494
6495/* Return 1 for a parallel vrsave operation.  */
6496
6497int
6498vrsave_operation (op, mode)
6499     rtx op;
6500     enum machine_mode mode ATTRIBUTE_UNUSED;
6501{
6502  int count = XVECLEN (op, 0);
6503  unsigned int dest_regno, src_regno;
6504  int i;
6505
6506  if (count <= 1
6507      || GET_CODE (XVECEXP (op, 0, 0)) != SET
6508      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6509      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6510    return 0;
6511
6512  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6513  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6514
6515  if (dest_regno != VRSAVE_REGNO
6516      && src_regno != VRSAVE_REGNO)
6517    return 0;
6518
6519  for (i = 1; i < count; i++)
6520    {
6521      rtx elt = XVECEXP (op, 0, i);
6522
6523      if (GET_CODE (elt) != CLOBBER
6524	  && GET_CODE (elt) != SET)
6525	return 0;
6526    }
6527
6528  return 1;
6529}
6530
/* Return 1 for a PARALLEL suitable for mtcrf.  */
6532
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The value moved to the CR fields must come from a single SImode
     integer register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element sets one CC-mode CR register.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The single mask bit selecting this CR field; field 0 is the
	 most significant bit of the mtcrf FXM mask.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is the bare unspec number used by the mtcrf
	 patterns in rs6000.md — confirm against the machine description
	 before changing either side.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6579
/* Return 1 for a PARALLEL suitable for lmw.  */
6581
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads up through r31, so the PARALLEL must cover
     exactly registers dest_regno..31.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the base address into (base register, constant offset).
     r0 cannot serve as a base register here.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Each element must load register dest_regno + i from the SImode
     word at base + offset + 4*i.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6657
/* Return 1 for a PARALLEL suitable for stmw.  */
6659
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores up through r31, so the PARALLEL must cover
     exactly registers src_regno..31.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the base address into (base register, constant offset).
     r0 cannot serve as a base register here.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Each element must store register src_regno + i to the SImode
     word at base + offset + 4*i.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6735
6736/* A validation routine: say whether CODE, a condition code, and MODE
6737   match.  The other alternatives either don't make sense or should
6738   never be generated.  */
6739
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* Caller must pass a comparison code and a CC-class mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense: signed comparisons on the unsigned CC mode.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  /* Unsigned comparisons only make sense on CCUNSmode.  */
  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* Ordered/unordered comparisons only exist for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_unsafe_math_optimizations and flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  CCEQmode only
     records equality.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
6780
6781/* Return 1 if OP is a comparison operation that is valid for a branch insn.
6782   We only check the opcode against the mode of the CC value here.  */
6783
6784int
6785branch_comparison_operator (op, mode)
6786     rtx op;
6787     enum machine_mode mode ATTRIBUTE_UNUSED;
6788{
6789  enum rtx_code code = GET_CODE (op);
6790  enum machine_mode cc_mode;
6791
6792  if (GET_RTX_CLASS (code) != '<')
6793    return 0;
6794
6795  cc_mode = GET_MODE (XEXP (op, 0));
6796  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6797    return 0;
6798
6799  validate_condition_mode (code, cc_mode);
6800
6801  return 1;
6802}
6803
6804/* Return 1 if OP is a comparison operation that is valid for a branch
6805   insn and which is true if the corresponding bit in the CC register
6806   is set.  */
6807
6808int
6809branch_positive_comparison_operator (op, mode)
6810     rtx op;
6811     enum machine_mode mode;
6812{
6813  enum rtx_code code;
6814
6815  if (! branch_comparison_operator (op, mode))
6816    return 0;
6817
6818  code = GET_CODE (op);
6819  return (code == EQ || code == LT || code == GT
6820	  || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6821	  || code == LTU || code == GTU
6822	  || code == UNORDERED);
6823}
6824
6825/* Return 1 if OP is a comparison operation that is valid for an scc insn.
6826   We check the opcode against the mode of the CC value and disallow EQ or
6827   NE comparisons for integers.  */
6828
6829int
6830scc_comparison_operator (op, mode)
6831     rtx op;
6832     enum machine_mode mode;
6833{
6834  enum rtx_code code = GET_CODE (op);
6835  enum machine_mode cc_mode;
6836
6837  if (GET_MODE (op) != mode && mode != VOIDmode)
6838    return 0;
6839
6840  if (GET_RTX_CLASS (code) != '<')
6841    return 0;
6842
6843  cc_mode = GET_MODE (XEXP (op, 0));
6844  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6845    return 0;
6846
6847  validate_condition_mode (code, cc_mode);
6848
6849  if (code == NE && cc_mode != CCFPmode)
6850    return 0;
6851
6852  return 1;
6853}
6854
6855int
6856trap_comparison_operator (op, mode)
6857    rtx op;
6858    enum machine_mode mode;
6859{
6860  if (mode != VOIDmode && mode != GET_MODE (op))
6861    return 0;
6862  return GET_RTX_CLASS (GET_CODE (op)) == '<';
6863}
6864
6865int
6866boolean_operator (op, mode)
6867    rtx op;
6868    enum machine_mode mode ATTRIBUTE_UNUSED;
6869{
6870  enum rtx_code code = GET_CODE (op);
6871  return (code == AND || code == IOR || code == XOR);
6872}
6873
6874int
6875boolean_or_operator (op, mode)
6876    rtx op;
6877    enum machine_mode mode ATTRIBUTE_UNUSED;
6878{
6879  enum rtx_code code = GET_CODE (op);
6880  return (code == IOR || code == XOR);
6881}
6882
6883int
6884min_max_operator (op, mode)
6885    rtx op;
6886    enum machine_mode mode ATTRIBUTE_UNUSED;
6887{
6888  enum rtx_code code = GET_CODE (op);
6889  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6890}
6891
6892/* Return 1 if ANDOP is a mask that has no bits on that are not in the
6893   mask required to convert the result of a rotate insn into a shift
6894   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
6895
6896int
6897includes_lshift_p (shiftop, andop)
6898     rtx shiftop;
6899     rtx andop;
6900{
6901  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6902
6903  shift_mask <<= INTVAL (shiftop);
6904
6905  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6906}
6907
6908/* Similar, but for right shift.  */
6909
6910int
6911includes_rshift_p (shiftop, andop)
6912     rtx shiftop;
6913     rtx andop;
6914{
6915  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6916
6917  shift_mask >>= INTVAL (shiftop);
6918
6919  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6920}
6921
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks never have the required shape.  */
      if (c == 0 || c == ~0)
	return 0;

      /* shift_mask gets 1's from bit SHIFTOP upward, 0's below.  */
      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask, i.e. the
	 mask has exactly SHIFTOP low-order zeros.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit mask held in a CONST_DOUBLE; when the host word is
	 narrower than 64 bits it is split across LOW and HIGH.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one 64-bit masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The whole mask lives in the high word, so the run of ones
	     must begin at or above bit 32; apply the CONST_INT
	     algorithm to the high word with the shift count reduced
	     by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the zero-to-one transition is in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both words and strip the low run (originally zeros),
	 then verify there is at most one further transition.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7016
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      /* shift_mask gets 1's from bit SHIFTOP upward, 0's below.  */
      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit mask held in a CONST_DOUBLE; split into two host
	 words when the host word is narrower than 64 bits.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lies entirely in the high word; apply the
		 CONST_INT algorithm to it with the shift count
		 reduced by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* The low word is nonzero, so the high word must be all
	     1's for the mask to end in one solid run of ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      /* The lowest one bit must be covered by the shift mask; this
	 also rejects low == 0.  */
      if ((lsb & shift_mask_low) == 0)
	return 0;

      /* All 1's above the transition, and not all 1's overall.  */
      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7088
7089/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7090   for lfq and stfq insns.
7091
7092   Note reg1 and reg2 *must* be hard registers.  To be sure we will
7093   abort if we are passed pseudo registers.  */
7094
7095int
7096registers_ok_for_quad_peep (reg1, reg2)
7097     rtx reg1, reg2;
7098{
7099  /* We might have been passed a SUBREG.  */
7100  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7101    return 0;
7102
7103  return (REGNO (reg1) == REGNO (reg2) - 1);
7104}
7105
7106/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7107   addr1 and addr2 must be in consecutive memory locations
7108   (addr2 == addr1 + 8).  */
7109
7110int
7111addrs_ok_for_quad_peep (addr1, addr2)
7112     rtx addr1;
7113     rtx addr2;
7114{
7115  unsigned int reg1;
7116  int offset1;
7117
7118  /* Extract an offset (if used) from the first addr.  */
7119  if (GET_CODE (addr1) == PLUS)
7120    {
7121      /* If not a REG, return zero.  */
7122      if (GET_CODE (XEXP (addr1, 0)) != REG)
7123	return 0;
7124      else
7125	{
7126          reg1 = REGNO (XEXP (addr1, 0));
7127	  /* The offset must be constant!  */
7128	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7129            return 0;
7130          offset1 = INTVAL (XEXP (addr1, 1));
7131	}
7132    }
7133  else if (GET_CODE (addr1) != REG)
7134    return 0;
7135  else
7136    {
7137      reg1 = REGNO (addr1);
7138      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
7139      offset1 = 0;
7140    }
7141
7142  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
7143  if (GET_CODE (addr2) != PLUS)
7144    return 0;
7145
7146  if (GET_CODE (XEXP (addr2, 0)) != REG
7147      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7148    return 0;
7149
7150  if (reg1 != REGNO (XEXP (addr2, 0)))
7151    return 0;
7152
7153  /* The offset for the second addr must be 8 more than the first addr.  */
7154  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7155    return 0;
7156
7157  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
7158     instructions.  */
7159  return 1;
7160}
7161
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 when it is not (or is
     not known to be in) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo: see whether reload has assigned it a hard reg.  */
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7234
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  /* The first operand must be a hard CR register.  */
  reg = XEXP (op, 0);

  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive bits of the CCR.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* SPE floating-point comparisons put their result in bit 1 of
	 the field (the bit GT would use).  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      /* For scc, a crnor will have moved the result to bit 3;
	 otherwise test the EQ bit (2).  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7299
7300/* Return the GOT register.  */
7301
7302struct rtx_def *
7303rs6000_got_register (value)
7304     rtx value ATTRIBUTE_UNUSED;
7305{
7306  /* The second flow pass currently (June 1999) can't update
7307     regs_ever_live without disturbing other parts of the compiler, so
7308     update it here to make the prolog/epilogue code happy.  */
7309  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7310    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7311
7312  current_function_uses_pic_offset_table = 1;
7313
7314  return pic_offset_table_rtx;
7315}
7316
7317/* Function to init struct machine_function.
7318   This will be called, via a pointer variable,
7319   from push_function_context.  */
7320
7321static struct machine_function *
7322rs6000_init_machine_status ()
7323{
7324  return ggc_alloc_cleared (sizeof (machine_function));
7325}
7326
7327/* These macros test for integers and extract the low-order bits.  */
7328#define INT_P(X)  \
7329((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
7330 && GET_MODE (X) == VOIDmode)
7331
7332#define INT_LOWPART(X) \
7333  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7334
/* Compute the MB (mask-begin) operand for a 32-bit mask constant OP;
   used by print_operand's %m.  OP must satisfy mask_operand.  */

int
extract_MB (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* An all-zero mask is not a valid mask operand.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
7368
/* Compute the ME (mask-end) operand for a 32-bit mask constant OP;
   used by print_operand's %M.  OP must satisfy mask_operand.  */

int
extract_ME (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* An all-zero mask is not a valid mask operand.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
7403
7404/* Print an operand.  Recognize special options, documented below.  */
7405
7406#if TARGET_ELF
7407#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7408#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7409#else
7410#define SMALL_DATA_RELOC "sda21"
7411#define SMALL_DATA_REG 0
7412#endif
7413
7414void
7415print_operand (file, x, code)
7416    FILE *file;
7417    rtx x;
7418    int code;
7419{
7420  int i;
7421  HOST_WIDE_INT val;
7422  unsigned HOST_WIDE_INT uval;
7423
7424  switch (code)
7425    {
7426    case '.':
7427      /* Write out an instruction after the call which may be replaced
7428	 with glue code by the loader.  This depends on the AIX version.  */
7429      asm_fprintf (file, RS6000_CALL_GLUE);
7430      return;
7431
7432      /* %a is output_address.  */
7433
7434    case 'A':
7435      /* If X is a constant integer whose low-order 5 bits are zero,
7436	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
7437	 in the AIX assembler where "sri" with a zero shift count
7438	 writes a trash instruction.  */
7439      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7440	putc ('l', file);
7441      else
7442	putc ('r', file);
7443      return;
7444
7445    case 'b':
7446      /* If constant, low-order 16 bits of constant, unsigned.
7447	 Otherwise, write normally.  */
7448      if (INT_P (x))
7449	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7450      else
7451	print_operand (file, x, 0);
7452      return;
7453
7454    case 'B':
7455      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7456	 for 64-bit mask direction.  */
7457      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7458      return;
7459
7460      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7461	 output_operand.  */
7462
7463    case 'D':
7464      /* There used to be a comment for 'C' reading "This is an
7465	   optional cror needed for certain floating-point
7466	   comparisons.  Otherwise write nothing."  */
7467
7468      /* Similar, except that this is for an scc, so we must be able to
7469	 encode the test in a single bit that is one.  We do the above
7470	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
7471      if (GET_CODE (x) == LE || GET_CODE (x) == GE
7472	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7473	{
7474	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7475
7476	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7477		   base_bit + 2,
7478		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7479	}
7480
7481      else if (GET_CODE (x) == NE)
7482	{
7483	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7484
7485	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7486		   base_bit + 2, base_bit + 2);
7487	}
7488      else if (TARGET_SPE && TARGET_HARD_FLOAT
7489	       && GET_CODE (x) == EQ
7490	       && GET_MODE (XEXP (x, 0)) == CCFPmode)
7491	{
7492	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7493
7494	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7495		   base_bit + 1, base_bit + 1);
7496	}
7497      return;
7498
7499    case 'E':
7500      /* X is a CR register.  Print the number of the EQ bit of the CR */
7501      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7502	output_operand_lossage ("invalid %%E value");
7503      else
7504	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7505      return;
7506
7507    case 'f':
7508      /* X is a CR register.  Print the shift count needed to move it
7509	 to the high-order four bits.  */
7510      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7511	output_operand_lossage ("invalid %%f value");
7512      else
7513	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7514      return;
7515
7516    case 'F':
7517      /* Similar, but print the count for the rotate in the opposite
7518	 direction.  */
7519      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7520	output_operand_lossage ("invalid %%F value");
7521      else
7522	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7523      return;
7524
7525    case 'G':
7526      /* X is a constant integer.  If it is negative, print "m",
7527	 otherwise print "z".  This is to make an aze or ame insn.  */
7528      if (GET_CODE (x) != CONST_INT)
7529	output_operand_lossage ("invalid %%G value");
7530      else if (INTVAL (x) >= 0)
7531	putc ('z', file);
7532      else
7533	putc ('m', file);
7534      return;
7535
7536    case 'h':
7537      /* If constant, output low-order five bits.  Otherwise, write
7538	 normally.  */
7539      if (INT_P (x))
7540	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7541      else
7542	print_operand (file, x, 0);
7543      return;
7544
7545    case 'H':
7546      /* If constant, output low-order six bits.  Otherwise, write
7547	 normally.  */
7548      if (INT_P (x))
7549	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7550      else
7551	print_operand (file, x, 0);
7552      return;
7553
7554    case 'I':
7555      /* Print `i' if this is a constant, else nothing.  */
7556      if (INT_P (x))
7557	putc ('i', file);
7558      return;
7559
7560    case 'j':
7561      /* Write the bit number in CCR for jump.  */
7562      i = ccr_bit (x, 0);
7563      if (i == -1)
7564	output_operand_lossage ("invalid %%j code");
7565      else
7566	fprintf (file, "%d", i);
7567      return;
7568
7569    case 'J':
7570      /* Similar, but add one for shift count in rlinm for scc and pass
7571	 scc flag to `ccr_bit'.  */
7572      i = ccr_bit (x, 1);
7573      if (i == -1)
7574	output_operand_lossage ("invalid %%J code");
7575      else
7576	/* If we want bit 31, write a shift count of zero, not 32.  */
7577	fprintf (file, "%d", i == 31 ? 0 : i + 1);
7578      return;
7579
7580    case 'k':
7581      /* X must be a constant.  Write the 1's complement of the
7582	 constant.  */
7583      if (! INT_P (x))
7584	output_operand_lossage ("invalid %%k value");
7585      else
7586	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7587      return;
7588
7589    case 'K':
7590      /* X must be a symbolic constant on ELF.  Write an
7591	 expression suitable for an 'addi' that adds in the low 16
7592	 bits of the MEM.  */
7593      if (GET_CODE (x) != CONST)
7594	{
7595	  print_operand_address (file, x);
7596	  fputs ("@l", file);
7597	}
7598      else
7599	{
7600	  if (GET_CODE (XEXP (x, 0)) != PLUS
7601	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7602		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7603	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7604	    output_operand_lossage ("invalid %%K value");
7605	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
7606	  fputs ("@l", file);
7607	  /* For GNU as, there must be a non-alphanumeric character
7608	     between 'l' and the number.  The '-' is added by
7609	     print_operand() already.  */
7610	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7611	    fputs ("+", file);
7612	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7613	}
7614      return;
7615
7616      /* %l is output_asm_label.  */
7617
7618    case 'L':
7619      /* Write second word of DImode or DFmode reference.  Works on register
7620	 or non-indexed memory only.  */
7621      if (GET_CODE (x) == REG)
7622	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7623      else if (GET_CODE (x) == MEM)
7624	{
7625	  /* Handle possible auto-increment.  Since it is pre-increment and
7626	     we have already done it, we can just use an offset of word.  */
7627	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
7628	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7629	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7630					   UNITS_PER_WORD));
7631	  else
7632	    output_address (XEXP (adjust_address_nv (x, SImode,
7633						     UNITS_PER_WORD),
7634				  0));
7635
7636	  if (small_data_operand (x, GET_MODE (x)))
7637	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7638		     reg_names[SMALL_DATA_REG]);
7639	}
7640      return;
7641
7642    case 'm':
7643      /* MB value for a mask operand.  */
7644      if (! mask_operand (x, SImode))
7645	output_operand_lossage ("invalid %%m value");
7646
7647      fprintf (file, "%d", extract_MB (x));
7648      return;
7649
7650    case 'M':
7651      /* ME value for a mask operand.  */
7652      if (! mask_operand (x, SImode))
7653	output_operand_lossage ("invalid %%M value");
7654
7655      fprintf (file, "%d", extract_ME (x));
7656      return;
7657
7658      /* %n outputs the negative of its operand.  */
7659
7660    case 'N':
7661      /* Write the number of elements in the vector times 4.  */
7662      if (GET_CODE (x) != PARALLEL)
7663	output_operand_lossage ("invalid %%N value");
7664      else
7665	fprintf (file, "%d", XVECLEN (x, 0) * 4);
7666      return;
7667
7668    case 'O':
7669      /* Similar, but subtract 1 first.  */
7670      if (GET_CODE (x) != PARALLEL)
7671	output_operand_lossage ("invalid %%O value");
7672      else
7673	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7674      return;
7675
7676    case 'p':
7677      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
7678      if (! INT_P (x)
7679	  || INT_LOWPART (x) < 0
7680	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
7681	output_operand_lossage ("invalid %%p value");
7682      else
7683	fprintf (file, "%d", i);
7684      return;
7685
7686    case 'P':
7687      /* The operand must be an indirect memory reference.  The result
7688	 is the register number.  */
7689      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7690	  || REGNO (XEXP (x, 0)) >= 32)
7691	output_operand_lossage ("invalid %%P value");
7692      else
7693	fprintf (file, "%d", REGNO (XEXP (x, 0)));
7694      return;
7695
7696    case 'q':
7697      /* This outputs the logical code corresponding to a boolean
7698	 expression.  The expression may have one or both operands
7699	 negated (if one, only the first one).  For condition register
7700         logical operations, it will also treat the negated
7701         CR codes as NOTs, but not handle NOTs of them.  */
7702      {
7703	const char *const *t = 0;
7704	const char *s;
7705	enum rtx_code code = GET_CODE (x);
7706	static const char * const tbl[3][3] = {
7707	  { "and", "andc", "nor" },
7708	  { "or", "orc", "nand" },
7709	  { "xor", "eqv", "xor" } };
7710
7711	if (code == AND)
7712	  t = tbl[0];
7713	else if (code == IOR)
7714	  t = tbl[1];
7715	else if (code == XOR)
7716	  t = tbl[2];
7717	else
7718	  output_operand_lossage ("invalid %%q value");
7719
7720	if (GET_CODE (XEXP (x, 0)) != NOT)
7721	  s = t[0];
7722	else
7723	  {
7724	    if (GET_CODE (XEXP (x, 1)) == NOT)
7725	      s = t[2];
7726	    else
7727	      s = t[1];
7728	  }
7729
7730	fputs (s, file);
7731      }
7732      return;
7733
7734    case 'R':
7735      /* X is a CR register.  Print the mask for `mtcrf'.  */
7736      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7737	output_operand_lossage ("invalid %%R value");
7738      else
7739	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7740      return;
7741
7742    case 's':
7743      /* Low 5 bits of 32 - value */
7744      if (! INT_P (x))
7745	output_operand_lossage ("invalid %%s value");
7746      else
7747	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7748      return;
7749
7750    case 'S':
7751      /* PowerPC64 mask position.  All 0's is excluded.
7752	 CONST_INT 32-bit mask is considered sign-extended so any
7753	 transition must occur within the CONST_INT, not on the boundary.  */
7754      if (! mask64_operand (x, DImode))
7755	output_operand_lossage ("invalid %%S value");
7756
7757      uval = INT_LOWPART (x);
7758
7759      if (uval & 1)	/* Clear Left */
7760	{
7761	  uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7762	  i = 64;
7763	}
7764      else		/* Clear Right */
7765	{
7766	  uval = ~uval;
7767	  uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7768	  i = 63;
7769	}
7770      while (uval != 0)
7771	--i, uval >>= 1;
7772      if (i < 0)
7773	abort ();
7774      fprintf (file, "%d", i);
7775      return;
7776
7777    case 't':
7778      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
7779      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7780	abort ();
7781
7782      /* Bit 3 is OV bit.  */
7783      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7784
7785      /* If we want bit 31, write a shift count of zero, not 32.  */
7786      fprintf (file, "%d", i == 31 ? 0 : i + 1);
7787      return;
7788
7789    case 'T':
7790      /* Print the symbolic name of a branch target register.  */
7791      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7792				  && REGNO (x) != COUNT_REGISTER_REGNUM))
7793	output_operand_lossage ("invalid %%T value");
7794      else if (REGNO (x) == LINK_REGISTER_REGNUM)
7795	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7796      else
7797	fputs ("ctr", file);
7798      return;
7799
7800    case 'u':
7801      /* High-order 16 bits of constant for use in unsigned operand.  */
7802      if (! INT_P (x))
7803	output_operand_lossage ("invalid %%u value");
7804      else
7805	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7806		 (INT_LOWPART (x) >> 16) & 0xffff);
7807      return;
7808
7809    case 'v':
7810      /* High-order 16 bits of constant for use in signed operand.  */
7811      if (! INT_P (x))
7812	output_operand_lossage ("invalid %%v value");
7813      else
7814	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7815		 (INT_LOWPART (x) >> 16) & 0xffff);
7816      return;
7817
7818    case 'U':
7819      /* Print `u' if this has an auto-increment or auto-decrement.  */
7820      if (GET_CODE (x) == MEM
7821	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
7822	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7823	putc ('u', file);
7824      return;
7825
7826    case 'V':
7827      /* Print the trap code for this operand.  */
7828      switch (GET_CODE (x))
7829	{
7830	case EQ:
7831	  fputs ("eq", file);   /* 4 */
7832	  break;
7833	case NE:
7834	  fputs ("ne", file);   /* 24 */
7835	  break;
7836	case LT:
7837	  fputs ("lt", file);   /* 16 */
7838	  break;
7839	case LE:
7840	  fputs ("le", file);   /* 20 */
7841	  break;
7842	case GT:
7843	  fputs ("gt", file);   /* 8 */
7844	  break;
7845	case GE:
7846	  fputs ("ge", file);   /* 12 */
7847	  break;
7848	case LTU:
7849	  fputs ("llt", file);  /* 2 */
7850	  break;
7851	case LEU:
7852	  fputs ("lle", file);  /* 6 */
7853	  break;
7854	case GTU:
7855	  fputs ("lgt", file);  /* 1 */
7856	  break;
7857	case GEU:
7858	  fputs ("lge", file);  /* 5 */
7859	  break;
7860	default:
7861	  abort ();
7862	}
7863      break;
7864
7865    case 'w':
7866      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
7867	 normally.  */
7868      if (INT_P (x))
7869	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7870		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7871      else
7872	print_operand (file, x, 0);
7873      return;
7874
7875    case 'W':
7876      /* MB value for a PowerPC64 rldic operand.  */
7877      val = (GET_CODE (x) == CONST_INT
7878	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7879
7880      if (val < 0)
7881	i = -1;
7882      else
7883	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7884	  if ((val <<= 1) < 0)
7885	    break;
7886
7887#if HOST_BITS_PER_WIDE_INT == 32
7888      if (GET_CODE (x) == CONST_INT && i >= 0)
7889	i += 32;  /* zero-extend high-part was all 0's */
7890      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7891	{
7892	  val = CONST_DOUBLE_LOW (x);
7893
7894	  if (val == 0)
7895	    abort ();
7896	  else if (val < 0)
7897	    --i;
7898	  else
7899	    for ( ; i < 64; i++)
7900	      if ((val <<= 1) < 0)
7901		break;
7902	}
7903#endif
7904
7905      fprintf (file, "%d", i + 1);
7906      return;
7907
7908    case 'X':
7909      if (GET_CODE (x) == MEM
7910	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7911	putc ('x', file);
7912      return;
7913
7914    case 'Y':
7915      /* Like 'L', for third word of TImode  */
7916      if (GET_CODE (x) == REG)
7917	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7918      else if (GET_CODE (x) == MEM)
7919	{
7920	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
7921	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7922	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7923	  else
7924	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7925	  if (small_data_operand (x, GET_MODE (x)))
7926	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7927		     reg_names[SMALL_DATA_REG]);
7928	}
7929      return;
7930
7931    case 'z':
7932      /* X is a SYMBOL_REF.  Write out the name preceded by a
7933	 period and without any trailing data in brackets.  Used for function
7934	 names.  If we are configured for System V (or the embedded ABI) on
7935	 the PowerPC, do not emit the period, since those systems do not use
7936	 TOCs and the like.  */
7937      if (GET_CODE (x) != SYMBOL_REF)
7938	abort ();
7939
7940      if (XSTR (x, 0)[0] != '.')
7941	{
7942	  switch (DEFAULT_ABI)
7943	    {
7944	    default:
7945	      abort ();
7946
7947	    case ABI_AIX:
7948	      putc ('.', file);
7949	      break;
7950
7951	    case ABI_V4:
7952	    case ABI_AIX_NODESC:
7953	    case ABI_DARWIN:
7954	      break;
7955	    }
7956	}
7957#if TARGET_AIX
7958      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7959#else
7960      assemble_name (file, XSTR (x, 0));
7961#endif
7962      return;
7963
7964    case 'Z':
7965      /* Like 'L', for last word of TImode.  */
7966      if (GET_CODE (x) == REG)
7967	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7968      else if (GET_CODE (x) == MEM)
7969	{
7970	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
7971	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7972	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7973	  else
7974	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7975	  if (small_data_operand (x, GET_MODE (x)))
7976	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7977		     reg_names[SMALL_DATA_REG]);
7978	}
7979      return;
7980
7981      /* Print AltiVec or SPE memory operand.  */
7982    case 'y':
7983      {
7984	rtx tmp;
7985
7986	if (GET_CODE (x) != MEM)
7987	  abort ();
7988
7989	tmp = XEXP (x, 0);
7990
7991	if (TARGET_SPE)
7992	  {
7993	    /* Handle [reg].  */
7994	    if (GET_CODE (tmp) == REG)
7995	      {
7996		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7997		break;
7998	      }
7999	    /* Handle [reg+UIMM].  */
8000	    else if (GET_CODE (tmp) == PLUS &&
8001		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8002	      {
8003		int x;
8004
8005		if (GET_CODE (XEXP (tmp, 0)) != REG)
8006		  abort ();
8007
8008		x = INTVAL (XEXP (tmp, 1));
8009		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8010		break;
8011	      }
8012
8013	    /* Fall through.  Must be [reg+reg].  */
8014	  }
8015	if (GET_CODE (tmp) == REG)
8016	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8017	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8018	  {
8019	    if (REGNO (XEXP (tmp, 0)) == 0)
8020	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8021		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
8022	    else
8023	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8024		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
8025	  }
8026	else
8027	  abort ();
8028	break;
8029      }
8030
8031    case 0:
8032      if (GET_CODE (x) == REG)
8033	fprintf (file, "%s", reg_names[REGNO (x)]);
8034      else if (GET_CODE (x) == MEM)
8035	{
8036	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
8037	     know the width from the mode.  */
8038	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8039	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8040		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8041	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8042	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8043		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8044	  else
8045	    output_address (XEXP (x, 0));
8046	}
8047      else
8048	output_addr_const (file, x);
8049      return;
8050
8051    default:
8052      output_operand_lossage ("invalid %%xn code");
8053    }
8054}
8055
/* Print the address of operand X to FILE, in assembler syntax.

   The forms handled are:
     reg                  -> "0(rN)"
     symbol/const/label   -> the constant, with a small-data relocation
			     suffix when it is a small-data reference
     reg + reg            -> "rA,rB" (indexed)
     reg + const_int      -> "D(rN)"
     lo_sum (reg, const)  -> "sym@l(rN)" on ELF, "lo16(sym)(rN)" on Mach-O
     TOC pool address     -> "sym[@toc](rN)"
   Anything else aborts.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* A bare symbolic address should never reach here when we are
	 using TOC addressing; such references go through the TOC.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed addressing.  Emit the non-r0 register first: r0 in
	 the base position is read as the literal zero on PowerPC.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Displacement form: "D(rN)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, splice "@toc" onto the symbol name for the
		 duration of the output call; restored below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  /* Undo both in-place edits made above.  */
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8145
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.

   X is the value to emit, SIZE its size in bytes, ALIGNED_P nonzero
   when the value is known to be aligned.  Returns true if the value
   was emitted here, false to fall back on the generic emitter.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Re-entry guard so that emitting the fixup record below does
	 not itself get fixup treatment.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word, emit it with an @fixup relocation, then
	     record the label's address in the .fixup section.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8209
8210#ifdef HAVE_GAS_HIDDEN
8211/* Emit an assembler directive to set symbol visibility for DECL to
8212   VISIBILITY_TYPE.  */
8213
8214static void
8215rs6000_assemble_visibility (decl, vis)
8216     tree decl;
8217     int vis;
8218{
8219  /* Functions need to have their entry point symbol visibility set as
8220     well as their descriptor symbol visibility.  */
8221  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8222    {
8223      static const char * const visibility_types[] = {
8224        NULL, "internal", "hidden", "protected"
8225      };
8226
8227      const char *name, *type;
8228
8229      name = ((* targetm.strip_name_encoding)
8230	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8231      type = visibility_types[vis];
8232
8233      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8234      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8235    }
8236  else
8237    default_assemble_visibility (decl, vis);
8238}
8239#endif
8240
8241enum rtx_code
8242rs6000_reverse_condition (mode, code)
8243     enum machine_mode mode;
8244     enum rtx_code code;
8245{
8246  /* Reversal of FP compares takes care -- an ordered compare
8247     becomes an unordered compare and vice versa.  */
8248  if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8249    return reverse_condition_maybe_unordered (code);
8250  else
8251    return reverse_condition (code);
8252}
8253
8254/* Generate a compare for CODE.  Return a brand-new rtx that
8255   represents the result of the compare.  */
8256
8257static rtx
8258rs6000_generate_compare (code)
8259     enum rtx_code code;
8260{
8261  enum machine_mode comp_mode;
8262  rtx compare_result;
8263
8264  if (rs6000_compare_fp_p)
8265    comp_mode = CCFPmode;
8266  else if (code == GTU || code == LTU
8267	  || code == GEU || code == LEU)
8268    comp_mode = CCUNSmode;
8269  else
8270    comp_mode = CCmode;
8271
8272  /* First, the compare.  */
8273  compare_result = gen_reg_rtx (comp_mode);
8274
8275  /* SPE FP compare instructions on the GPRs.  Yuck!  */
8276  if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8277    {
8278      rtx cmp, or1, or2, or_result, compare_result2;
8279
8280      switch (code)
8281	{
8282	case EQ:
8283	case UNEQ:
8284	case NE:
8285	case LTGT:
8286	  cmp = flag_unsafe_math_optimizations
8287	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8288			       rs6000_compare_op1)
8289	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8290			       rs6000_compare_op1);
8291	  break;
8292	case GT:
8293	case GTU:
8294	case UNGT:
8295	case UNGE:
8296	case GE:
8297	case GEU:
8298	  cmp = flag_unsafe_math_optimizations
8299	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8300			       rs6000_compare_op1)
8301	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8302			       rs6000_compare_op1);
8303	  break;
8304	case LT:
8305	case LTU:
8306	case UNLT:
8307	case UNLE:
8308	case LE:
8309	case LEU:
8310	  cmp = flag_unsafe_math_optimizations
8311	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8312			       rs6000_compare_op1)
8313	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8314			       rs6000_compare_op1);
8315	  break;
8316	default:
8317	  abort ();
8318	}
8319
8320      /* Synthesize LE and GE from LT/GT || EQ.  */
8321      if (code == LE || code == GE || code == LEU || code == GEU)
8322	{
8323	  /* Synthesize GE/LE frome GT/LT || EQ.  */
8324
8325	  emit_insn (cmp);
8326
8327	  switch (code)
8328	    {
8329	    case LE: code = LT; break;
8330	    case GE: code = GT; break;
8331	    case LEU: code = LT; break;
8332	    case GEU: code = GT; break;
8333	    default: abort ();
8334	    }
8335
8336	  or1 = gen_reg_rtx (SImode);
8337	  or2 = gen_reg_rtx (SImode);
8338	  or_result = gen_reg_rtx (CCEQmode);
8339	  compare_result2 = gen_reg_rtx (CCFPmode);
8340
8341	  /* Do the EQ.  */
8342	  cmp = flag_unsafe_math_optimizations
8343	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8344			       rs6000_compare_op1)
8345	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8346			       rs6000_compare_op1);
8347	  emit_insn (cmp);
8348
8349	  /* The MC8540 FP compare instructions set the CR bits
8350	     differently than other PPC compare instructions.  For
8351	     that matter, there is no generic test instruction, but a
8352	     testgt, testlt, and testeq.  For a true condition, bit 2
8353	     is set (x1xx) in the CR.  Following the traditional CR
8354	     values:
8355
8356	     LT    GT    EQ    OV
8357	     bit3  bit2  bit1  bit0
8358
8359	     ... bit 2 would be a GT CR alias, so later on we
8360	     look in the GT bits for the branch instructins.
8361	     However, we must be careful to emit correct RTL in
8362	     the meantime, so optimizations don't get confused.  */
8363
8364	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8365	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8366
8367	  /* OR them together.  */
8368	  cmp = gen_rtx_SET (VOIDmode, or_result,
8369			     gen_rtx_COMPARE (CCEQmode,
8370					      gen_rtx_IOR (SImode, or1, or2),
8371					      const_true_rtx));
8372	  compare_result = or_result;
8373	  code = EQ;
8374	}
8375      else
8376	{
8377	  /* We only care about 1 bit (x1xx), so map everything to NE to
8378	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
8379	     code output time.  */
8380	  if (code == NE || code == LTGT)
8381	    /* Do the inverse here because we have no cmpne
8382	       instruction.  We use the cmpeq instruction and expect
8383	       to get a 0 instead.  */
8384	    code = EQ;
8385	  else
8386	    code = NE;
8387	}
8388
8389      emit_insn (cmp);
8390    }
8391  else
8392    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8393			    gen_rtx_COMPARE (comp_mode,
8394					     rs6000_compare_op0,
8395					     rs6000_compare_op1)));
8396
8397  /* Some kinds of FP comparisons need an OR operation;
8398     except for flag_unsafe_math_optimizations we don't bother.  */
8399  if (rs6000_compare_fp_p
8400      && ! flag_unsafe_math_optimizations
8401      && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8402      && (code == LE || code == GE
8403	  || code == UNEQ || code == LTGT
8404	  || code == UNGT || code == UNLT))
8405    {
8406      enum rtx_code or1, or2;
8407      rtx or1_rtx, or2_rtx, compare2_rtx;
8408      rtx or_result = gen_reg_rtx (CCEQmode);
8409
8410      switch (code)
8411	{
8412	case LE: or1 = LT;  or2 = EQ;  break;
8413	case GE: or1 = GT;  or2 = EQ;  break;
8414	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
8415	case LTGT: or1 = LT;  or2 = GT;  break;
8416	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
8417	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
8418	default:  abort ();
8419	}
8420      validate_condition_mode (or1, comp_mode);
8421      validate_condition_mode (or2, comp_mode);
8422      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8423      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8424      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8425				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8426				      const_true_rtx);
8427      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8428
8429      compare_result = or_result;
8430      code = EQ;
8431    }
8432
8433  validate_condition_mode (code, GET_MODE (compare_result));
8434
8435  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8436}
8437
8438
8439/* Emit the RTL for an sCOND pattern.  */
8440
8441void
8442rs6000_emit_sCOND (code, result)
8443     enum rtx_code code;
8444     rtx result;
8445{
8446  rtx condition_rtx;
8447  enum machine_mode op_mode;
8448
8449  condition_rtx = rs6000_generate_compare (code);
8450
8451  op_mode = GET_MODE (rs6000_compare_op0);
8452  if (op_mode == VOIDmode)
8453    op_mode = GET_MODE (rs6000_compare_op1);
8454
8455  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8456    {
8457      PUT_MODE (condition_rtx, DImode);
8458      convert_move (result, condition_rtx, 0);
8459    }
8460  else
8461    {
8462      PUT_MODE (condition_rtx, SImode);
8463      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8464    }
8465}
8466
8467/* Emit a branch of kind CODE to location LOC.  */
8468
8469void
8470rs6000_emit_cbranch (code, loc)
8471     enum rtx_code code;
8472     rtx loc;
8473{
8474  rtx condition_rtx, loc_ref;
8475
8476  condition_rtx = rs6000_generate_compare (code);
8477  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8478  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8479			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8480						     loc_ref, pc_rtx)));
8481}
8482
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned pointer refers to a static buffer that is overwritten
   by the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the target is out of range for a single
     conditional branch, so we branch around an unconditional one.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* Branching around the long branch inverts the tested sense.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map the rtx comparison code to the assembler condition code.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* "{...|...}" selects old POWER vs. new PowerPC mnemonics.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8616
8617/* Emit a conditional move: move TRUE_COND to DEST if OP of the
8618   operands of the last comparison is nonzero/true, FALSE_COND if it
8619   is zero/false.  Return 0 if the hardware has no such operation.  */
8620
8621int
8622rs6000_emit_cmove (dest, op, true_cond, false_cond)
8623     rtx dest;
8624     rtx op;
8625     rtx true_cond;
8626     rtx false_cond;
8627{
8628  enum rtx_code code = GET_CODE (op);
8629  rtx op0 = rs6000_compare_op0;
8630  rtx op1 = rs6000_compare_op1;
8631  REAL_VALUE_TYPE c1;
8632  enum machine_mode compare_mode = GET_MODE (op0);
8633  enum machine_mode result_mode = GET_MODE (dest);
8634  rtx temp;
8635
8636  /* These modes should always match. */
8637  if (GET_MODE (op1) != compare_mode
8638      /* In the isel case however, we can use a compare immediate, so
8639	 op1 may be a small constant.  */
8640      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8641    return 0;
8642  if (GET_MODE (true_cond) != result_mode)
8643    return 0;
8644  if (GET_MODE (false_cond) != result_mode)
8645    return 0;
8646
8647  /* First, work out if the hardware can do this at all, or
8648     if it's too slow...  */
8649  if (! rs6000_compare_fp_p)
8650    {
8651      if (TARGET_ISEL)
8652	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8653      return 0;
8654    }
8655
8656  /* Eliminate half of the comparisons by switching operands, this
8657     makes the remaining code simpler.  */
8658  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8659      || code == LTGT || code == LT)
8660    {
8661      code = reverse_condition_maybe_unordered (code);
8662      temp = true_cond;
8663      true_cond = false_cond;
8664      false_cond = temp;
8665    }
8666
8667  /* UNEQ and LTGT take four instructions for a comparison with zero,
8668     it'll probably be faster to use a branch here too.  */
8669  if (code == UNEQ)
8670    return 0;
8671
8672  if (GET_CODE (op1) == CONST_DOUBLE)
8673    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8674
8675  /* We're going to try to implement comparions by performing
8676     a subtract, then comparing against zero.  Unfortunately,
8677     Inf - Inf is NaN which is not zero, and so if we don't
8678     know that the operand is finite and the comparison
8679     would treat EQ different to UNORDERED, we can't do it.  */
8680  if (! flag_unsafe_math_optimizations
8681      && code != GT && code != UNGE
8682      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8683      /* Constructs of the form (a OP b ? a : b) are safe.  */
8684      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8685	  || (! rtx_equal_p (op0, true_cond)
8686	      && ! rtx_equal_p (op1, true_cond))))
8687    return 0;
8688  /* At this point we know we can use fsel.  */
8689
8690  /* Reduce the comparison to a comparison against zero.  */
8691  temp = gen_reg_rtx (compare_mode);
8692  emit_insn (gen_rtx_SET (VOIDmode, temp,
8693			  gen_rtx_MINUS (compare_mode, op0, op1)));
8694  op0 = temp;
8695  op1 = CONST0_RTX (compare_mode);
8696
8697  /* If we don't care about NaNs we can reduce some of the comparisons
8698     down to faster ones.  */
8699  if (flag_unsafe_math_optimizations)
8700    switch (code)
8701      {
8702      case GT:
8703	code = LE;
8704	temp = true_cond;
8705	true_cond = false_cond;
8706	false_cond = temp;
8707	break;
8708      case UNGE:
8709	code = GE;
8710	break;
8711      case UNEQ:
8712	code = EQ;
8713	break;
8714      default:
8715	break;
8716      }
8717
8718  /* Now, reduce everything down to a GE.  */
8719  switch (code)
8720    {
8721    case GE:
8722      break;
8723
8724    case LE:
8725      temp = gen_reg_rtx (compare_mode);
8726      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8727      op0 = temp;
8728      break;
8729
8730    case ORDERED:
8731      temp = gen_reg_rtx (compare_mode);
8732      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8733      op0 = temp;
8734      break;
8735
8736    case EQ:
8737      temp = gen_reg_rtx (compare_mode);
8738      emit_insn (gen_rtx_SET (VOIDmode, temp,
8739			      gen_rtx_NEG (compare_mode,
8740					   gen_rtx_ABS (compare_mode, op0))));
8741      op0 = temp;
8742      break;
8743
8744    case UNGE:
8745      temp = gen_reg_rtx (result_mode);
8746      emit_insn (gen_rtx_SET (VOIDmode, temp,
8747			      gen_rtx_IF_THEN_ELSE (result_mode,
8748						    gen_rtx_GE (VOIDmode,
8749								op0, op1),
8750						    true_cond, false_cond)));
8751      false_cond = temp;
8752      true_cond = false_cond;
8753
8754      temp = gen_reg_rtx (compare_mode);
8755      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8756      op0 = temp;
8757      break;
8758
8759    case GT:
8760      temp = gen_reg_rtx (result_mode);
8761      emit_insn (gen_rtx_SET (VOIDmode, temp,
8762			      gen_rtx_IF_THEN_ELSE (result_mode,
8763						    gen_rtx_GE (VOIDmode,
8764								op0, op1),
8765						    true_cond, false_cond)));
8766      true_cond = temp;
8767      false_cond = true_cond;
8768
8769      temp = gen_reg_rtx (compare_mode);
8770      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8771      op0 = temp;
8772      break;
8773
8774    default:
8775      abort ();
8776    }
8777
8778  emit_insn (gen_rtx_SET (VOIDmode, dest,
8779			  gen_rtx_IF_THEN_ELSE (result_mode,
8780						gen_rtx_GE (VOIDmode,
8781							    op0, op1),
8782						true_cond, false_cond)));
8783  return 1;
8784}
8785
8786/* Same as above, but for ints (isel).  */
8787
8788static int
8789rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8790     rtx dest;
8791     rtx op;
8792     rtx true_cond;
8793     rtx false_cond;
8794{
8795  rtx condition_rtx, cr;
8796
8797  /* All isel implementations thus far are 32-bits.  */
8798  if (GET_MODE (rs6000_compare_op0) != SImode)
8799    return 0;
8800
8801  /* We still have to do the compare, because isel doesn't do a
8802     compare, it just looks at the CRx bits set by a previous compare
8803     instruction.  */
8804  condition_rtx = rs6000_generate_compare (GET_CODE (op));
8805  cr = XEXP (condition_rtx, 0);
8806
8807  if (GET_MODE (cr) == CCmode)
8808    emit_insn (gen_isel_signed (dest, condition_rtx,
8809				true_cond, false_cond, cr));
8810  else
8811    emit_insn (gen_isel_unsigned (dest, condition_rtx,
8812				  true_cond, false_cond, cr));
8813
8814  return 1;
8815}
8816
8817const char *
8818output_isel (operands)
8819     rtx *operands;
8820{
8821  enum rtx_code code;
8822
8823  code = GET_CODE (operands[1]);
8824  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8825    {
8826      PUT_CODE (operands[1], reverse_condition (code));
8827      return "isel %0,%3,%2,%j1";
8828    }
8829  else
8830    return "isel %0,%2,%3,%j1";
8831}
8832
8833void
8834rs6000_emit_minmax (dest, code, op0, op1)
8835     rtx dest;
8836     enum rtx_code code;
8837     rtx op0;
8838     rtx op1;
8839{
8840  enum machine_mode mode = GET_MODE (op0);
8841  enum rtx_code c;
8842  rtx target;
8843
8844  if (code == SMAX || code == SMIN)
8845    c = GE;
8846  else
8847    c = GEU;
8848
8849  if (code == SMAX || code == UMAX)
8850    target = emit_conditional_move (dest, c, op0, op1, mode,
8851				    op0, op1, mode, 0);
8852  else
8853    target = emit_conditional_move (dest, c, op0, op1, mode,
8854				    op1, op0, mode, 0);
8855  if (target == NULL_RTX)
8856    abort ();
8857  if (target != dest)
8858    emit_move_insn (dest, target);
8859}
8860
8861/* This page contains routines that are used to determine what the
8862   function prologue and epilogue code will do and write them out.  */
8863
8864/* Return the first fixed-point register that is required to be
8865   saved. 32 if none.  */
8866
8867int
8868first_reg_to_save ()
8869{
8870  int first_reg;
8871
8872  /* Find lowest numbered live register.  */
8873  for (first_reg = 13; first_reg <= 31; first_reg++)
8874    if (regs_ever_live[first_reg]
8875	&& (! call_used_regs[first_reg]
8876	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8877		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
8878		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8879      break;
8880
8881#if TARGET_MACHO
8882  if (flag_pic
8883      && current_function_uses_pic_offset_table
8884      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8885    return RS6000_PIC_OFFSET_TABLE_REGNUM;
8886#endif
8887
8888  return first_reg;
8889}
8890
8891/* Similar, for FP regs.  */
8892
8893int
8894first_fp_reg_to_save ()
8895{
8896  int first_reg;
8897
8898  /* Find lowest numbered live register.  */
8899  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8900    if (regs_ever_live[first_reg])
8901      break;
8902
8903  return first_reg;
8904}
8905
8906/* Similar, for AltiVec regs.  */
8907
8908static int
8909first_altivec_reg_to_save ()
8910{
8911  int i;
8912
8913  /* Stack frame remains as is unless we are in AltiVec ABI.  */
8914  if (! TARGET_ALTIVEC_ABI)
8915    return LAST_ALTIVEC_REGNO + 1;
8916
8917  /* Find lowest numbered live register.  */
8918  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8919    if (regs_ever_live[i])
8920      break;
8921
8922  return i;
8923}
8924
8925/* Return a 32-bit mask of the AltiVec registers we need to set in
8926   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
8927   the 32-bit word is 0.  */
8928
8929static unsigned int
8930compute_vrsave_mask ()
8931{
8932  unsigned int i, mask = 0;
8933
8934  /* First, find out if we use _any_ altivec registers.  */
8935  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8936    if (regs_ever_live[i])
8937      mask |= ALTIVEC_REG_BIT (i);
8938
8939  if (mask == 0)
8940    return mask;
8941
8942  /* Next, add all registers that are call-clobbered.  We do this
8943     because post-reload register optimizers such as regrename_optimize
8944     may choose to use them.  They never change the register class
8945     chosen by reload, so cannot create new uses of altivec registers
8946     if there were none before, so the early exit above is safe.  */
8947  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8948     altivec registers not saved in the mask, which might well make the
8949     adjustments below more effective in eliding the save/restore of
8950     VRSAVE in small functions.  */
8951  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8952    if (call_used_regs[i])
8953      mask |= ALTIVEC_REG_BIT (i);
8954
8955  /* Next, remove the argument registers from the set.  These must
8956     be in the VRSAVE mask set by the caller, so we don't need to add
8957     them in again.  More importantly, the mask we compute here is
8958     used to generate CLOBBERs in the set_vrsave insn, and we do not
8959     wish the argument registers to die.  */
8960  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8961    mask &= ~ALTIVEC_REG_BIT (i);
8962
8963  /* Similarly, remove the return value from the set.  */
8964  {
8965    bool yes = false;
8966    diddle_return_value (is_altivec_return_reg, &yes);
8967    if (yes)
8968      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
8969  }
8970
8971  return mask;
8972}
8973
8974static void
8975is_altivec_return_reg (reg, xyes)
8976     rtx reg;
8977     void *xyes;
8978{
8979  bool *yes = (bool *) xyes;
8980  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8981    *yes = true;
8982}
8983
8984
8985/* Calculate the stack information for the current function.  This is
8986   complicated by having two separate calling sequences, the AIX calling
8987   sequence and the V.4 calling sequence.
8988
8989   AIX (and Darwin/Mac OS X) stack frames look like:
8990							  32-bit  64-bit
8991	SP---->	+---------------------------------------+
8992		| back chain to caller			| 0	  0
8993		+---------------------------------------+
8994		| saved CR				| 4       8 (8-11)
8995		+---------------------------------------+
8996		| saved LR				| 8       16
8997		+---------------------------------------+
8998		| reserved for compilers		| 12      24
8999		+---------------------------------------+
9000		| reserved for binders			| 16      32
9001		+---------------------------------------+
9002		| saved TOC pointer			| 20      40
9003		+---------------------------------------+
9004		| Parameter save area (P)		| 24      48
9005		+---------------------------------------+
9006		| Alloca space (A)			| 24+P    etc.
9007		+---------------------------------------+
9008		| Local variable space (L)		| 24+P+A
9009		+---------------------------------------+
9010		| Float/int conversion temporary (X)	| 24+P+A+L
9011		+---------------------------------------+
9012		| Save area for AltiVec registers (W)	| 24+P+A+L+X
9013		+---------------------------------------+
9014		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
9015		+---------------------------------------+
9016		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
9017		+---------------------------------------+
9018		| Save area for GP registers (G)	| 24+P+A+X+L+X+W+Y+Z
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9022	old SP->| back chain to caller's caller		|
9023		+---------------------------------------+
9024
9025   The required alignment for AIX configurations is two words (i.e., 8
9026   or 16 bytes).
9027
9028
9029   V.4 stack frames look like:
9030
9031	SP---->	+---------------------------------------+
9032		| back chain to caller			| 0
9033		+---------------------------------------+
9034		| caller's saved LR			| 4
9035		+---------------------------------------+
9036		| Parameter save area (P)		| 8
9037		+---------------------------------------+
9038		| Alloca space (A)			| 8+P
9039		+---------------------------------------+
9040		| Varargs save area (V)			| 8+P+A
9041		+---------------------------------------+
9042		| Local variable space (L)		| 8+P+A+V
9043		+---------------------------------------+
9044		| Float/int conversion temporary (X)	| 8+P+A+V+L
9045		+---------------------------------------+
9046		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
9047		+---------------------------------------+
9048		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
9049		+---------------------------------------+
9050		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
9051		+---------------------------------------+
9052                | SPE: area for 64-bit GP registers     |
9053                +---------------------------------------+
9054                | SPE alignment padding                 |
9055                +---------------------------------------+
9056		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
9057		+---------------------------------------+
9058		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
9059		+---------------------------------------+
9060		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
9061		+---------------------------------------+
9062	old SP->| back chain to caller's caller		|
9063		+---------------------------------------+
9064
9065   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9066   given.  (But note below and in sysv4.h that we require only 8 and
9067   may round up the size of our stack frame anyways.  The historical
9068   reason is early versions of powerpc-linux which didn't properly
9069   align the stack at program startup.  A happy side-effect is that
9070   -mno-eabi libraries can be used with -meabi programs.)
9071
9072   The EABI configuration defaults to the V.4 layout, unless
9073   -mcall-aix is used, in which case the AIX layout is used.  However,
9074   the stack alignment requirements may differ.  If -mno-eabi is not
9075   given, the required stack alignment is 8 bytes; if -mno-eabi is
9076   given, the required alignment is 16 bytes.  (But see V.4 comment
9077   above.)  */
9078
9079#ifndef ABI_STACK_BOUNDARY
9080#define ABI_STACK_BOUNDARY STACK_BOUNDARY
9081#endif
9082
9083rs6000_stack_t *
9084rs6000_stack_info ()
9085{
9086  static rs6000_stack_t info, zero_info;
9087  rs6000_stack_t *info_ptr = &info;
9088  int reg_size = TARGET_POWERPC64 ? 8 : 4;
9089  enum rs6000_abi abi;
9090  int ehrd_size;
9091  int total_raw_size;
9092
9093  /* Zero all fields portably.  */
9094  info = zero_info;
9095
9096  /* Select which calling sequence.  */
9097  info_ptr->abi = abi = DEFAULT_ABI;
9098
9099  /* Calculate which registers need to be saved & save area size.  */
9100  info_ptr->first_gp_reg_save = first_reg_to_save ();
9101  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9102     even if it currently looks like we won't.  */
9103  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9104       || (flag_pic == 1 && abi == ABI_V4)
9105       || (flag_pic && abi == ABI_DARWIN))
9106      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9107    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9108  else
9109    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9110
9111  /* For the SPE, we have an additional upper 32-bits on each GPR.
9112     Ideally we should save the entire 64-bits only when the upper
9113     half is used in SIMD instructions.  Since we only record
9114     registers live (not the size they are used in), this proves
9115     difficult because we'd have to traverse the instruction chain at
9116     the right time, taking reload into account.  This is a real pain,
9117     so we opt to save the GPRs in 64-bits always.  Anyone overly
9118     concerned with frame size can fix this.  ;-).
9119
9120     So... since we save all GPRs (except the SP) in 64-bits, the
9121     traditional GP save area will be empty.  */
9122  if (TARGET_SPE_ABI)
9123    info_ptr->gp_size = 0;
9124
9125  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9126  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9127
9128  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9129  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9130				 - info_ptr->first_altivec_reg_save);
9131
9132  /* Does this function call anything?  */
9133  info_ptr->calls_p = (! current_function_is_leaf
9134		       || cfun->machine->ra_needs_full_frame);
9135
9136  /* Determine if we need to save the link register.  */
9137  if (rs6000_ra_ever_killed ()
9138      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
9139#ifdef TARGET_RELOCATABLE
9140      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9141#endif
9142      || (info_ptr->first_fp_reg_save != 64
9143	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9144      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9145      || (abi == ABI_V4 && current_function_calls_alloca)
9146      || (DEFAULT_ABI == ABI_DARWIN
9147	  && flag_pic
9148	  && current_function_uses_pic_offset_table)
9149      || info_ptr->calls_p)
9150    {
9151      info_ptr->lr_save_p = 1;
9152      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9153    }
9154
9155  /* Determine if we need to save the condition code registers.  */
9156  if (regs_ever_live[CR2_REGNO]
9157      || regs_ever_live[CR3_REGNO]
9158      || regs_ever_live[CR4_REGNO])
9159    {
9160      info_ptr->cr_save_p = 1;
9161      if (abi == ABI_V4)
9162	info_ptr->cr_size = reg_size;
9163    }
9164
9165  /* If the current function calls __builtin_eh_return, then we need
9166     to allocate stack space for registers that will hold data for
9167     the exception handler.  */
9168  if (current_function_calls_eh_return)
9169    {
9170      unsigned int i;
9171      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9172	continue;
9173
9174      /* SPE saves EH registers in 64-bits.  */
9175      ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9176    }
9177  else
9178    ehrd_size = 0;
9179
9180  /* Determine various sizes.  */
9181  info_ptr->reg_size     = reg_size;
9182  info_ptr->fixed_size   = RS6000_SAVE_AREA;
9183  info_ptr->varargs_size = RS6000_VARARGS_AREA;
9184  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
9185  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
9186					 8);
9187
9188  if (TARGET_SPE_ABI)
9189    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9190  else
9191    info_ptr->spe_gp_size = 0;
9192
9193  if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9194    {
9195      info_ptr->vrsave_mask = compute_vrsave_mask ();
9196      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
9197    }
9198  else
9199    {
9200      info_ptr->vrsave_mask = 0;
9201      info_ptr->vrsave_size = 0;
9202    }
9203
9204  /* Calculate the offsets.  */
9205  switch (abi)
9206    {
9207    case ABI_NONE:
9208    default:
9209      abort ();
9210
9211    case ABI_AIX:
9212    case ABI_AIX_NODESC:
9213    case ABI_DARWIN:
9214      info_ptr->fp_save_offset   = - info_ptr->fp_size;
9215      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
9216
9217      if (TARGET_ALTIVEC_ABI)
9218	{
9219	  info_ptr->vrsave_save_offset
9220	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9221
9222	  /* Align stack so vector save area is on a quadword boundary.  */
9223	  if (info_ptr->altivec_size != 0)
9224	    info_ptr->altivec_padding_size
9225	      = 16 - (-info_ptr->vrsave_save_offset % 16);
9226	  else
9227	    info_ptr->altivec_padding_size = 0;
9228
9229	  info_ptr->altivec_save_offset
9230	    = info_ptr->vrsave_save_offset
9231	    - info_ptr->altivec_padding_size
9232	    - info_ptr->altivec_size;
9233
9234	  /* Adjust for AltiVec case.  */
9235	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9236	}
9237      else
9238	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
9239      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
9240      info_ptr->lr_save_offset   = 2*reg_size;
9241      break;
9242
9243    case ABI_V4:
9244      info_ptr->fp_save_offset   = - info_ptr->fp_size;
9245      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
9246      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
9247
9248      if (TARGET_SPE_ABI)
9249      {
9250        /* Align stack so SPE GPR save area is aligned on a
9251           double-word boundary.  */
9252        if (info_ptr->spe_gp_size != 0)
9253          info_ptr->spe_padding_size
9254            = 8 - (-info_ptr->cr_save_offset % 8);
9255        else
9256          info_ptr->spe_padding_size = 0;
9257
9258        info_ptr->spe_gp_save_offset
9259          = info_ptr->cr_save_offset
9260          - info_ptr->spe_padding_size
9261          - info_ptr->spe_gp_size;
9262
9263        /* Adjust for SPE case.  */
9264        info_ptr->toc_save_offset
9265          = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9266      }
9267      else if (TARGET_ALTIVEC_ABI)
9268	{
9269	  info_ptr->vrsave_save_offset
9270	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9271
9272	  /* Align stack so vector save area is on a quadword boundary.  */
9273	  if (info_ptr->altivec_size != 0)
9274	    info_ptr->altivec_padding_size
9275	      = 16 - (-info_ptr->vrsave_save_offset % 16);
9276	  else
9277	    info_ptr->altivec_padding_size = 0;
9278
9279	  info_ptr->altivec_save_offset
9280	    = info_ptr->vrsave_save_offset
9281	    - info_ptr->altivec_padding_size
9282	    - info_ptr->altivec_size;
9283
9284	  /* Adjust for AltiVec case.  */
9285	  info_ptr->toc_save_offset
9286	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
9287	}
9288      else
9289	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
9290      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
9291      info_ptr->lr_save_offset   = reg_size;
9292      break;
9293    }
9294
9295  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
9296					 + info_ptr->gp_size
9297					 + info_ptr->altivec_size
9298					 + info_ptr->altivec_padding_size
9299					 + info_ptr->vrsave_size
9300					 + info_ptr->spe_gp_size
9301					 + info_ptr->spe_padding_size
9302					 + ehrd_size
9303					 + info_ptr->cr_size
9304					 + info_ptr->lr_size
9305					 + info_ptr->toc_size,
9306					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9307					 ? 16 : 8);
9308
9309  total_raw_size	 = (info_ptr->vars_size
9310			    + info_ptr->parm_size
9311			    + info_ptr->save_size
9312			    + info_ptr->varargs_size
9313			    + info_ptr->fixed_size);
9314
9315  info_ptr->total_size =
9316    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9317
9318  /* Determine if we need to allocate any stack frame:
9319
9320     For AIX we need to push the stack if a frame pointer is needed
9321     (because the stack might be dynamically adjusted), if we are
9322     debugging, if we make calls, or if the sum of fp_save, gp_save,
9323     and local variables are more than the space needed to save all
9324     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9325     + 18*8 = 288 (GPR13 reserved).
9326
9327     For V.4 we don't have the stack cushion that AIX uses, but assume
9328     that the debugger can handle stackless frames.  */
9329
9330  if (info_ptr->calls_p)
9331    info_ptr->push_p = 1;
9332
9333  else if (abi == ABI_V4)
9334    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9335
9336  else
9337    info_ptr->push_p = (frame_pointer_needed
9338			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9339			|| ((total_raw_size - info_ptr->fixed_size)
9340			    > (TARGET_32BIT ? 220 : 288)));
9341
9342  /* Zero offsets if we're not saving those registers.  */
9343  if (info_ptr->fp_size == 0)
9344    info_ptr->fp_save_offset = 0;
9345
9346  if (info_ptr->gp_size == 0)
9347    info_ptr->gp_save_offset = 0;
9348
9349  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9350    info_ptr->altivec_save_offset = 0;
9351
9352  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9353    info_ptr->vrsave_save_offset = 0;
9354
9355  if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9356    info_ptr->spe_gp_save_offset = 0;
9357
9358  if (! info_ptr->lr_save_p)
9359    info_ptr->lr_save_offset = 0;
9360
9361  if (! info_ptr->cr_save_p)
9362    info_ptr->cr_save_offset = 0;
9363
9364  if (! info_ptr->toc_save_p)
9365    info_ptr->toc_save_offset = 0;
9366
9367  return info_ptr;
9368}
9369
/* Dump the fields of INFO (one rs6000_stack_t frame description) to
   stderr for debugging, omitting fields that are zero or not
   applicable.  If INFO is null, compute the current function's frame
   layout via rs6000_stack_info () first.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Translate the ABI enumerator to a printable name.  */
  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 / LAST_ALTIVEC_REGNO + 1 mean "no register of that class
     needs saving", so those values are not printed.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9515
9516rtx
9517rs6000_return_addr (count, frame)
9518     int count;
9519     rtx frame;
9520{
9521  /* Currently we don't optimize very well between prolog and body
9522     code and for PIC code the code can be actually quite bad, so
9523     don't try to be too clever here.  */
9524  if (count != 0 || flag_pic != 0)
9525    {
9526      cfun->machine->ra_needs_full_frame = 1;
9527
9528      return
9529	gen_rtx_MEM
9530	  (Pmode,
9531	   memory_address
9532	   (Pmode,
9533	    plus_constant (copy_to_reg
9534			   (gen_rtx_MEM (Pmode,
9535					 memory_address (Pmode, frame))),
9536			   RETURN_ADDRESS_OFFSET)));
9537    }
9538
9539  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9540}
9541
9542/* Say whether a function is a candidate for sibcall handling or not.
9543   We do not allow indirect calls to be optimized into sibling calls.
9544   Also, we can't do it if there are any vector parameters; there's
9545   nowhere to put the VRsave code so it works; note that functions with
9546   vector parameters are required to have a prototype, so the argument
9547   type info must be available here.  (The tail recursion case can work
9548   with vector parameters, but there's no way to distinguish here.) */
9549int
9550function_ok_for_sibcall (fndecl)
9551    tree fndecl;
9552{
9553  tree type;
9554  if (fndecl)
9555    {
9556      if (TARGET_ALTIVEC_VRSAVE)
9557        {
9558	  for (type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9559	       type; type = TREE_CHAIN (type))
9560	    {
9561	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9562		return 0;
9563	    }
9564        }
9565      if (DEFAULT_ABI == ABI_DARWIN
9566	  || (*targetm.binds_local_p) (fndecl))
9567	{
9568	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
9569
9570	  if (!lookup_attribute ("longcall", attr_list)
9571	      || lookup_attribute ("shortcall", attr_list))
9572	    return 1;
9573	}
9574    }
9575  return 0;
9576}
9577
9578static int
9579rs6000_ra_ever_killed ()
9580{
9581  rtx top;
9582  rtx reg;
9583  rtx insn;
9584
9585  /* Irritatingly, there are two kinds of thunks -- those created with
9586     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9587     through the regular part of the compiler.  This is a very hacky
9588     way to tell them apart.  */
9589  if (current_function_is_thunk && !no_new_pseudos)
9590    return 0;
9591
9592  /* regs_ever_live has LR marked as used if any sibcalls are present,
9593     but this should not force saving and restoring in the
9594     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
9595     clobbers LR, so that is inappropriate. */
9596
9597  /* Also, the prologue can generate a store into LR that
9598     doesn't really count, like this:
9599
9600        move LR->R0
9601        bcl to set PIC register
9602        move LR->R31
9603        move R0->LR
9604
9605     When we're called from the epilogue, we need to avoid counting
9606     this as a store.  */
9607
9608  push_topmost_sequence ();
9609  top = get_insns ();
9610  pop_topmost_sequence ();
9611  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9612
9613  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9614    {
9615      if (INSN_P (insn))
9616	{
9617	  if (FIND_REG_INC_NOTE (insn, reg))
9618	    return 1;
9619	  else if (GET_CODE (insn) == CALL_INSN
9620		   && !SIBLING_CALL_P (insn))
9621	    return 1;
9622	  else if (set_of (reg, insn) != NULL_RTX
9623		   && !prologue_epilogue_contains (insn))
9624	    return 1;
9625    	}
9626    }
9627  return 0;
9628}
9629
9630/* Add a REG_MAYBE_DEAD note to the insn.  */
9631static void
9632rs6000_maybe_dead (insn)
9633     rtx insn;
9634{
9635  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9636					const0_rtx,
9637					REG_NOTES (insn));
9638}
9639
9640/* Emit instructions needed to load the TOC register.
9641   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9642   a constant pool; or for SVR4 -fpic.  */
9643
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  /* FROMPROLOG is nonzero when called while emitting the prologue; in
     that case LR (and r0) are used as temporaries instead of fresh
     pseudos, and each emitted insn is tagged with a REG_MAYBE_DEAD
     note via rs6000_maybe_dead.  */
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC (-fpic): load the GOT pointer through a
     single load_toc_v4_pic_si insn, then copy it into DEST.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 large-model PIC (-fPIC): compute the TOC base from a
     pc-relative label pair (prologue) or a per-call label (reload).  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF..: the point whose address is loaded into LR;
	     LCL..: the label whose distance to the TOC is added.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  /* Static counter: each non-prologue reload of the TOC gets a
	     fresh LCG label.  */
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* In both sub-cases, DEST = TEMP0 + DEST forms the final TOC
	 pointer.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      /* Materialize the address of the LCTOC1 label with a
	 high/low-part pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: a single pattern, selected by word size.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
9743
int
get_TOC_alias_set ()
{
  /* Lazily allocated alias set shared by all TOC references;
     -1 means "not created yet".  */
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9752
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
9756
9757int
9758uses_TOC ()
9759{
9760    rtx insn;
9761
9762    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9763      if (INSN_P (insn))
9764	{
9765	  rtx pat = PATTERN (insn);
9766	  int i;
9767
9768	  if (GET_CODE (pat) == PARALLEL)
9769	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9770	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9771		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9772		  return 1;
9773	}
9774    return 0;
9775}
9776
9777rtx
9778create_TOC_reference (symbol)
9779    rtx symbol;
9780{
9781  return gen_rtx_PLUS (Pmode,
9782	   gen_rtx_REG (Pmode, TOC_REGISTER),
9783	     gen_rtx_CONST (Pmode,
9784	       gen_rtx_MINUS (Pmode, symbol,
9785		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9786}
9787
9788#if TARGET_AIX
9789/* __throw will restore its own return address to be the same as the
9790   return address of the function that the throw is being made to.
9791   This is unfortunate, because we want to check the original
9792   return address to see if we need to restore the TOC.
9793   So we have to squirrel it away here.
9794   This is used only in compiling __throw and __rethrow.
9795
9796   Most of this code should be removed by CSE.  */
9797static rtx insn_after_throw;
9798
9799/* This does the saving...  */
9800void
9801rs6000_aix_emit_builtin_unwind_init ()
9802{
9803  rtx mem;
9804  rtx stack_top = gen_reg_rtx (Pmode);
9805  rtx opcode_addr = gen_reg_rtx (Pmode);
9806
9807  insn_after_throw = gen_reg_rtx (SImode);
9808
9809  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9810  emit_move_insn (stack_top, mem);
9811
9812  mem = gen_rtx_MEM (Pmode,
9813		     gen_rtx_PLUS (Pmode, stack_top,
9814				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9815  emit_move_insn (opcode_addr, mem);
9816  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9817}
9818
9819/* Emit insns to _restore_ the TOC register, at runtime (specifically
9820   in _eh.o).  Only used on AIX.
9821
9822   The idea is that on AIX, function calls look like this:
9823	bl  somefunction-trampoline
9824	lwz r2,20(sp)
9825
9826   and later,
9827	somefunction-trampoline:
9828	stw r2,20(sp)
9829	 ... load function address in the count register ...
9830	bctr
9831   or like this, if the linker determines that this is not a cross-module call
9832   and so the TOC need not be restored:
9833	bl  somefunction
9834	nop
9835   or like this, if the compiler could determine that this is not a
9836   cross-module call:
9837	bl  somefunction
9838   now, the tricky bit here is that register 2 is saved and restored
9839   by the _linker_, so we can't readily generate debugging information
9840   for it.  So we need to go back up the call chain looking at the
9841   insns at return addresses to see which calls saved the TOC register
9842   and so see where it gets restored from.
9843
9844   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9845   just before the actual epilogue.
9846
9847   On the bright side, this incurs no space or time overhead unless an
9848   exception is thrown, except for the extra code in libgcc.a.
9849
9850   The parameter STACKSIZE is a register containing (at runtime)
9851   the amount to be popped off the stack in addition to the stack frame
9852   of this routine (which will be __throw or __rethrow, and so is
9853   guaranteed to have a stack frame).  */
9854
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Read the back-chain word of our own frame; BOTTOM_OF_STACK will
     then walk up the call chain one frame at a time.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* TOP_OF_STACK marks where the walk stops: our frame plus the extra
     amount the caller asked us to pop.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The encoding of the TOC-restore instruction that follows a
     cross-module call: "lwz r2,20(r1)" in 32-bit mode, "ld r2,40(r1)"
     in 64-bit mode (see the call-sequence comment above).  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first to capture
     the instruction at the original return address.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the instruction after the call is not the TOC restore, this
     call did not clobber the TOC, so nothing to reload here.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* The return address was followed by a TOC restore, so reload r2
     from the TOC save slot of this frame (offset 20 or 40, i.e.
     5 pointer words, matching the opcode checked above).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once we have walked past everything being popped.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step up to the next frame via its back-chain word ...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the instruction at that frame's return address for
     the next iteration's comparison.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9916#endif /* TARGET_AIX */
9917
9918/* This ties together stack memory (MEM with an alias set of
9919   rs6000_sr_alias_set) and the change to the stack pointer.  */
9920
9921static void
9922rs6000_emit_stack_tie ()
9923{
9924  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9925
9926  set_mem_alias_set (mem, rs6000_sr_alias_set);
9927  emit_insn (gen_stack_tie (mem));
9928}
9929
9930/* Emit the correct code for allocating stack space, as insns.
9931   If COPY_R12, make sure a copy of the old frame is left in r12.
9932   The generated code may use hard register 0 as a temporary.  */
9933
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* Implement -fstack-limit-register / -fstack-limit-symbol: emit a
     conditional trap if the decremented stack pointer would fall below
     the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* tmp_reg = limit + size; trap if sp < tmp_reg after the
	     decrement (i.e. new sp would be below the limit).  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Materialize symbol+size with a lis/addi (elf_high/elf_low)
	     pair, then trap the same way as the register case.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without update-form stores, the fallback path below stores r12 at
     the new stack bottom, so it always needs the old sp in r12.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* Use a store-with-update (stwu/stdu) so the back chain is
	 written atomically with the sp decrement.  */
      if (size > 32767)
	{
	  /* The decrement doesn't fit in a D-field; load it into r0
	     first.  Need a note here so that try_split doesn't get
	     confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: decrement sp, then store the old sp (saved in
	 r12 above) at the new stack bottom as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Tell the unwinder the net effect (sp = sp - size), independent of
     which insn sequence actually performed it.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10019
10020/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10021   knows that:
10022
10023     (mem (plus (blah) (regXX)))
10024
10025   is really:
10026
10027     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
10028
10029static void
10030altivec_frame_fixup (insn, reg, val)
10031     rtx insn, reg;
10032     HOST_WIDE_INT val;
10033{
10034  rtx real;
10035
10036  real = copy_rtx (PATTERN (insn));
10037
10038  real = replace_rtx (real, reg, GEN_INT (val));
10039
10040  RTX_FRAME_RELATED_P (insn) = 1;
10041  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10042					real,
10043					REG_NOTES (insn));
10044}
10045
10046/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10047   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10048   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
10049   deduce these equivalences by itself so it wasn't necessary to hold
10050   its hand so much.  */
10051
10052static void
10053rs6000_frame_related (insn, reg, val, reg2, rreg)
10054     rtx insn;
10055     rtx reg;
10056     HOST_WIDE_INT val;
10057     rtx reg2;
10058     rtx rreg;
10059{
10060  rtx real, temp;
10061
10062  /* copy_rtx will not make unique copies of registers, so we need to
10063     ensure we don't have unwanted sharing here.  */
10064  if (reg == reg2)
10065    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10066
10067  if (reg == rreg)
10068    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10069
10070  real = copy_rtx (PATTERN (insn));
10071
10072  if (reg2 != NULL_RTX)
10073    real = replace_rtx (real, reg2, rreg);
10074
10075  real = replace_rtx (real, reg,
10076		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10077							STACK_POINTER_REGNUM),
10078				    GEN_INT (val)));
10079
10080  /* We expect that 'real' is either a SET or a PARALLEL containing
10081     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
10082     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
10083
10084  if (GET_CODE (real) == SET)
10085    {
10086      rtx set = real;
10087
10088      temp = simplify_rtx (SET_SRC (set));
10089      if (temp)
10090	SET_SRC (set) = temp;
10091      temp = simplify_rtx (SET_DEST (set));
10092      if (temp)
10093	SET_DEST (set) = temp;
10094      if (GET_CODE (SET_DEST (set)) == MEM)
10095	{
10096	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10097	  if (temp)
10098	    XEXP (SET_DEST (set), 0) = temp;
10099	}
10100    }
10101  else if (GET_CODE (real) == PARALLEL)
10102    {
10103      int i;
10104      for (i = 0; i < XVECLEN (real, 0); i++)
10105	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10106	  {
10107	    rtx set = XVECEXP (real, 0, i);
10108
10109	    temp = simplify_rtx (SET_SRC (set));
10110	    if (temp)
10111	      SET_SRC (set) = temp;
10112	    temp = simplify_rtx (SET_DEST (set));
10113	    if (temp)
10114	      SET_DEST (set) = temp;
10115	    if (GET_CODE (SET_DEST (set)) == MEM)
10116	      {
10117		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10118		if (temp)
10119		  XEXP (SET_DEST (set), 0) = temp;
10120	      }
10121	    RTX_FRAME_RELATED_P (set) = 1;
10122	  }
10123    }
10124  else
10125    abort ();
10126
10127  RTX_FRAME_RELATED_P (insn) = 1;
10128  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10129					real,
10130					REG_NOTES (insn));
10131}
10132
10133/* Returns an insn that has a vrsave set operation with the
10134   appropriate CLOBBERs.  */
10135
10136static rtx
10137generate_set_vrsave (reg, info, epiloguep)
10138     rtx reg;
10139     rs6000_stack_t *info;
10140     int epiloguep;
10141{
10142  int nclobs, i;
10143  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10144  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10145
10146  clobs[0]
10147    = gen_rtx_SET (VOIDmode,
10148		   vrsave,
10149		   gen_rtx_UNSPEC_VOLATILE (SImode,
10150					    gen_rtvec (2, reg, vrsave),
10151					    30));
10152
10153  nclobs = 1;
10154
10155  /* We need to clobber the registers in the mask so the scheduler
10156     does not move sets to VRSAVE before sets of AltiVec registers.
10157
10158     However, if the function receives nonlocal gotos, reload will set
10159     all call saved registers live.  We will end up with:
10160
10161     	(set (reg 999) (mem))
10162	(parallel [ (set (reg vrsave) (unspec blah))
10163		    (clobber (reg 999))])
10164
10165     The clobber will cause the store into reg 999 to be dead, and
10166     flow will attempt to delete an epilogue insn.  In this case, we
10167     need an unspec use/set of the register.  */
10168
10169  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10170    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10171      {
10172	if (!epiloguep || call_used_regs [i])
10173	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10174					     gen_rtx_REG (V4SImode, i));
10175	else
10176	  {
10177	    rtx reg = gen_rtx_REG (V4SImode, i);
10178
10179	    clobs[nclobs++]
10180	      = gen_rtx_SET (VOIDmode,
10181			     reg,
10182			     gen_rtx_UNSPEC (V4SImode,
10183					     gen_rtvec (1, reg), 27));
10184	  }
10185      }
10186
10187  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10188
10189  for (i = 0; i < nclobs; ++i)
10190    XVECEXP (insn, 0, i) = clobs[i];
10191
10192  return insn;
10193}
10194
10195/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10196   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */
10197
10198static void
10199emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10200     rtx frame_reg;
10201     rtx frame_ptr;
10202     enum machine_mode mode;
10203     unsigned int regno;
10204     int offset;
10205     int total_size;
10206{
10207  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10208  rtx replacea, replaceb;
10209
10210  int_rtx = GEN_INT (offset);
10211
10212  /* Some cases that need register indexed addressing.  */
10213  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10214      || (TARGET_SPE_ABI
10215	  && SPE_VECTOR_MODE (mode)
10216	  && !SPE_CONST_OFFSET_OK (offset)))
10217    {
10218      /* Whomever calls us must make sure r11 is available in the
10219         flow path of instructions in the prologue.  */
10220      offset_rtx = gen_rtx_REG (Pmode, 11);
10221      emit_move_insn (offset_rtx, int_rtx);
10222
10223      replacea = offset_rtx;
10224      replaceb = int_rtx;
10225    }
10226  else
10227    {
10228      offset_rtx = int_rtx;
10229      replacea = NULL_RTX;
10230      replaceb = NULL_RTX;
10231    }
10232
10233  reg = gen_rtx_REG (mode, regno);
10234  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10235  mem = gen_rtx_MEM (mode, addr);
10236  set_mem_alias_set (mem, rs6000_sr_alias_set);
10237
10238  insn = emit_move_insn (mem, reg);
10239
10240  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10241}
10242
10243/* Emit an offset memory reference suitable for a frame store, while
10244   converting to a valid addressing mode.  */
10245
10246static rtx
10247gen_frame_mem_offset (mode, reg, offset)
10248     enum machine_mode mode;
10249     rtx reg;
10250     int offset;
10251{
10252  rtx int_rtx, offset_rtx;
10253
10254  int_rtx = GEN_INT (offset);
10255
10256  if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10257    {
10258      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10259      emit_move_insn (offset_rtx, int_rtx);
10260    }
10261  else
10262    offset_rtx = int_rtx;
10263
10264  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10265}
10266
10267/* Emit function prologue as insns.  */
10268
void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

   /* Under the SPE ABI, GPRs are saved as 8-byte V2SImode pairs.  */
   if (TARGET_SPE_ABI)
     {
       reg_mode = V2SImode;
       reg_size = 8;
     }

  /* Store-multiple is only usable for 32-bit word saves and is only
     worthwhile when at least two GPRs need saving.  */
  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && !TARGET_SPE_ABI
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* If the frame is small enough, saves can address their slots
	 with a constant offset from the new sp; otherwise keep the
	 old sp in r12 and address slots relative to it.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* Record in the unwind note that AREG held OFFSET.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: call the _savefN millicode routine, which
	 clobbers LR, and mark every FPR store in the PARALLEL.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Individual stores; the PIC register is saved too when it is
	 live as a PIC base under V.4 or Darwin.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		/* SPE stores need the offset in a register when it
		   does not fit the instruction's immediate field.  */
		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      /* r0 holds the LR value, copied there above.  */
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R11 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, 11),
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

    rs6000_emit_load_toc_table (TRUE);

    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		      gen_rtx_REG (Pmode, 11));
  }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      /* Set up the Darwin PIC base: load it into LR via the
	 load_macho_picbase pattern, then copy it to the PIC register.
	 These may be deleted later if they turn out to be dead.  */
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
10632
10633/* Write function prologue.  */
10634
static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Only emit the externs once per translation unit.  */
      common_mode_defined = 1;
    }

  /* If the md file provides no prologue pattern, generate the prologue
     RTL here in a temporary sequence and print it as assembly.  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash. */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    /* Any fixed per-insn increment will do for final's needs.  */
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Each function gets a distinct PIC label number.  */
  rs6000_pic_labelno++;
}
10695
10696/* Emit function epilogue as insns.
10697
10698   At present, dwarf2out_frame_debug_expr doesn't understand
10699   register restores, so we don't bother setting RTX_FRAME_RELATED_P
10700   anywhere in the epilogue.  Most of the insns below would in any case
10701   need special notes to explain where r11 is in relation to the stack.  */
10702
10703void
10704rs6000_emit_epilogue (sibcall)
10705     int sibcall;
10706{
10707  rs6000_stack_t *info;
10708  int restoring_FPRs_inline;
10709  int using_load_multiple;
10710  int using_mfcr_multiple;
10711  int use_backchain_to_restore_sp;
10712  int sp_offset = 0;
10713  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10714  rtx frame_reg_rtx = sp_reg_rtx;
10715  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10716  int reg_size = TARGET_POWERPC64 ? 8 : 4;
10717  int i;
10718
10719  if (TARGET_SPE_ABI)
10720    {
10721      reg_mode = V2SImode;
10722      reg_size = 8;
10723    }
10724
10725  info = rs6000_stack_info ();
10726  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10727			 && !TARGET_SPE_ABI
10728			 && info->first_gp_reg_save < 31);
10729  restoring_FPRs_inline = (sibcall
10730			   || current_function_calls_eh_return
10731			   || info->first_fp_reg_save == 64
10732			   || FP_SAVE_INLINE (info->first_fp_reg_save));
10733  use_backchain_to_restore_sp = (frame_pointer_needed
10734				 || current_function_calls_alloca
10735				 || info->total_size > 32767);
10736  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10737			 || rs6000_cpu == PROCESSOR_PPC603
10738			 || rs6000_cpu == PROCESSOR_PPC750
10739			 || optimize_size);
10740
10741  /* If we have a frame pointer, a call to alloca,  or a large stack
10742     frame, restore the old stack pointer using the backchain.  Otherwise,
10743     we know what size to update it with.  */
10744  if (use_backchain_to_restore_sp)
10745    {
10746      /* Under V.4, don't reset the stack pointer until after we're done
10747	 loading the saved registers.  */
10748      if (DEFAULT_ABI == ABI_V4)
10749	frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10750
10751      emit_move_insn (frame_reg_rtx,
10752		      gen_rtx_MEM (Pmode, sp_reg_rtx));
10753
10754    }
10755  else if (info->push_p)
10756    {
10757      if (DEFAULT_ABI == ABI_V4)
10758	sp_offset = info->total_size;
10759      else
10760	{
10761	  emit_insn (TARGET_32BIT
10762		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10763				   GEN_INT (info->total_size))
10764		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10765				   GEN_INT (info->total_size)));
10766	}
10767    }
10768
10769  /* Restore AltiVec registers if needed.  */
10770  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10771    {
10772      int i;
10773
10774      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10775	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10776	  {
10777	    rtx addr, areg, mem;
10778
10779	    areg = gen_rtx_REG (Pmode, 0);
10780	    emit_move_insn
10781	      (areg, GEN_INT (info->altivec_save_offset
10782			      + sp_offset
10783			      + 16 * (i - info->first_altivec_reg_save)));
10784
10785	    /* AltiVec addressing mode is [reg+reg].  */
10786	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10787	    mem = gen_rtx_MEM (V4SImode, addr);
10788	    set_mem_alias_set (mem, rs6000_sr_alias_set);
10789
10790	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10791	  }
10792    }
10793
10794  /* Restore VRSAVE if needed.  */
10795  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10796    {
10797      rtx addr, mem, reg;
10798
10799      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10800			   GEN_INT (info->vrsave_save_offset + sp_offset));
10801      mem = gen_rtx_MEM (SImode, addr);
10802      set_mem_alias_set (mem, rs6000_sr_alias_set);
10803      reg = gen_rtx_REG (SImode, 12);
10804      emit_move_insn (reg, mem);
10805
10806      emit_insn (generate_set_vrsave (reg, info, 1));
10807    }
10808
10809  /* Get the old lr if we saved it.  */
10810  if (info->lr_save_p)
10811    {
10812      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10813				      info->lr_save_offset + sp_offset);
10814
10815      set_mem_alias_set (mem, rs6000_sr_alias_set);
10816
10817      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10818    }
10819
10820  /* Get the old cr if we saved it.  */
10821  if (info->cr_save_p)
10822    {
10823      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10824			       GEN_INT (info->cr_save_offset + sp_offset));
10825      rtx mem = gen_rtx_MEM (SImode, addr);
10826
10827      set_mem_alias_set (mem, rs6000_sr_alias_set);
10828
10829      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10830    }
10831
10832  /* Set LR here to try to overlap restores below.  */
10833  if (info->lr_save_p)
10834    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10835		    gen_rtx_REG (Pmode, 0));
10836
10837  /* Load exception handler data registers, if needed.  */
10838  if (current_function_calls_eh_return)
10839    {
10840      unsigned int i, regno;
10841
10842      for (i = 0; ; ++i)
10843	{
10844	  rtx mem;
10845
10846	  regno = EH_RETURN_DATA_REGNO (i);
10847	  if (regno == INVALID_REGNUM)
10848	    break;
10849
10850	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
10851				      info->ehrd_offset + sp_offset
10852				      + reg_size * (int) i);
10853	  set_mem_alias_set (mem, rs6000_sr_alias_set);
10854
10855	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
10856	}
10857    }
10858
10859  /* Restore GPRs.  This is done as a PARALLEL if we are using
10860     the load-multiple instructions.  */
10861  if (using_load_multiple)
10862    {
10863      rtvec p;
10864      p = rtvec_alloc (32 - info->first_gp_reg_save);
10865      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10866	{
10867	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10868				   GEN_INT (info->gp_save_offset
10869					    + sp_offset
10870					    + reg_size * i));
10871	  rtx mem = gen_rtx_MEM (reg_mode, addr);
10872
10873	  set_mem_alias_set (mem, rs6000_sr_alias_set);
10874
10875	  RTVEC_ELT (p, i) =
10876	    gen_rtx_SET (VOIDmode,
10877			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
10878			 mem);
10879	}
10880      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10881    }
10882  else
10883    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10884      if ((regs_ever_live[info->first_gp_reg_save+i]
10885	   && ! call_used_regs[info->first_gp_reg_save+i])
10886	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10887	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10888		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10889	{
10890	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10891				   GEN_INT (info->gp_save_offset
10892					    + sp_offset
10893					    + reg_size * i));
10894	  rtx mem = gen_rtx_MEM (reg_mode, addr);
10895
10896	  /* Restore 64-bit quantities for SPE.  */
10897	  if (TARGET_SPE_ABI)
10898	    {
10899	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10900	      rtx b;
10901
10902	      if (!SPE_CONST_OFFSET_OK (offset))
10903		{
10904		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10905		  emit_move_insn (b, GEN_INT (offset));
10906		}
10907	      else
10908		b = GEN_INT (offset);
10909
10910	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10911	      mem = gen_rtx_MEM (V2SImode, addr);
10912	    }
10913
10914	  set_mem_alias_set (mem, rs6000_sr_alias_set);
10915
10916	  emit_move_insn (gen_rtx_REG (reg_mode,
10917				       info->first_gp_reg_save + i), mem);
10918	}
10919
10920  /* Restore fpr's if we need to do it without calling a function.  */
10921  if (restoring_FPRs_inline)
10922    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10923      if ((regs_ever_live[info->first_fp_reg_save+i]
10924	   && ! call_used_regs[info->first_fp_reg_save+i]))
10925	{
10926	  rtx addr, mem;
10927	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10928			       GEN_INT (info->fp_save_offset
10929					+ sp_offset
10930					+ 8 * i));
10931	  mem = gen_rtx_MEM (DFmode, addr);
10932	  set_mem_alias_set (mem, rs6000_sr_alias_set);
10933
10934	  emit_move_insn (gen_rtx_REG (DFmode,
10935				       info->first_fp_reg_save + i),
10936			  mem);
10937	}
10938
10939  /* If we saved cr, restore it here.  Just those that were used.  */
10940  if (info->cr_save_p)
10941    {
10942      rtx r12_rtx = gen_rtx_REG (SImode, 12);
10943      int count = 0;
10944
10945      if (using_mfcr_multiple)
10946	{
10947	  for (i = 0; i < 8; i++)
10948	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10949	      count++;
10950	  if (count == 0)
10951	    abort ();
10952	}
10953
10954      if (using_mfcr_multiple && count > 1)
10955	{
10956	  rtvec p;
10957	  int ndx;
10958
10959	  p = rtvec_alloc (count);
10960
10961	  ndx = 0;
10962	  for (i = 0; i < 8; i++)
10963	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10964	      {
10965		rtvec r = rtvec_alloc (2);
10966		RTVEC_ELT (r, 0) = r12_rtx;
10967		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
10968		RTVEC_ELT (p, ndx) =
10969		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
10970			       gen_rtx_UNSPEC (CCmode, r, 20));
10971		ndx++;
10972	      }
10973	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10974	  if (ndx != count)
10975	    abort ();
10976	}
10977      else
10978	for (i = 0; i < 8; i++)
10979	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10980	    {
10981	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
10982							   CR0_REGNO+i),
10983					      r12_rtx));
10984	    }
10985    }
10986
10987  /* If this is V.4, unwind the stack pointer after all of the loads
10988     have been done.  We need to emit a block here so that sched
10989     doesn't decide to move the sp change before the register restores
10990     (which may not have any obvious dependency on the stack).  This
10991     doesn't hurt performance, because there is no scheduling that can
10992     be done after this point.  */
10993  if (DEFAULT_ABI == ABI_V4)
10994    {
10995      if (frame_reg_rtx != sp_reg_rtx)
10996	  rs6000_emit_stack_tie ();
10997
10998      if (use_backchain_to_restore_sp)
10999	{
11000	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11001	}
11002      else if (sp_offset != 0)
11003	{
11004	  emit_insn (Pmode == SImode
11005		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11006				   GEN_INT (sp_offset))
11007		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11008				   GEN_INT (sp_offset)));
11009	}
11010    }
11011
11012  if (current_function_calls_eh_return)
11013    {
11014      rtx sa = EH_RETURN_STACKADJ_RTX;
11015      emit_insn (Pmode == SImode
11016		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11017		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11018    }
11019
11020  if (!sibcall)
11021    {
11022      rtvec p;
11023      if (! restoring_FPRs_inline)
11024	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11025      else
11026	p = rtvec_alloc (2);
11027
11028      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11029      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11030				      gen_rtx_REG (Pmode,
11031						   LINK_REGISTER_REGNUM));
11032
11033      /* If we have to restore more than two FP registers, branch to the
11034	 restore function.  It will return to our caller.  */
11035      if (! restoring_FPRs_inline)
11036	{
11037	  int i;
11038	  char rname[30];
11039	  const char *alloc_rname;
11040
11041	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11042		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11043	  alloc_rname = ggc_strdup (rname);
11044	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11045					  gen_rtx_SYMBOL_REF (Pmode,
11046							      alloc_rname));
11047
11048	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11049	    {
11050	      rtx addr, mem;
11051	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11052				   GEN_INT (info->fp_save_offset + 8*i));
11053	      mem = gen_rtx_MEM (DFmode, addr);
11054	      set_mem_alias_set (mem, rs6000_sr_alias_set);
11055
11056	      RTVEC_ELT (p, i+3) =
11057		gen_rtx_SET (VOIDmode,
11058			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11059			     mem);
11060	    }
11061	}
11062
11063      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11064    }
11065}
11066
/* Write function epilogue.

   Called via the function-epilogue target hook.  If HAVE_epilogue is
   false the epilogue was not emitted as RTL, so generate it now into a
   temporary sequence and pass it through final() to FILE.  Afterwards,
   emit the AIX traceback table when the ABI requires one.  SIZE is
   unused.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash. */
	  /* Addresses are synthesized assuming the fixed 4-byte PowerPC
	     instruction size.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

#if TARGET_OBJECT_FORMAT == OBJECT_MACHO
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
	   && NOTE_P (insn)
	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
	&& (LABEL_P (insn)
	    || (NOTE_P (insn)
		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 PowerPC targets (and the embedded ABIs derived from
     them) use a different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* -mtraceback=full/part force the choice; otherwise emit the
	 optional fields only when not optimizing for size and not
	 targeting ELF.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only register-passed parameters are described;
		 stack-passed ones are skipped entirely.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11357
11358/* A C compound statement that outputs the assembler code for a thunk
11359   function, used to implement C++ virtual function calls with
11360   multiple inheritance.  The thunk acts as a wrapper around a virtual
11361   function, adjusting the implicit object parameter before handing
11362   control off to the real function.
11363
11364   First, emit code to add the integer DELTA to the location that
11365   contains the incoming first argument.  Assume that this argument
11366   contains a pointer, and is the one used to pass the `this' pointer
11367   in C++.  This is the incoming argument *before* the function
11368   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
11369   values of all other incoming arguments.
11370
11371   After the addition, emit code to jump to FUNCTION, which is a
11372   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
11373   not touch the return address.  Hence returning from FUNCTION will
11374   return to whoever called the current `thunk'.
11375
11376   The effect must be as if FUNCTION had been called directly with the
11377   adjusted first argument.  This macro is responsible for emitting
11378   all of the code for a thunk function; output_function_prologue()
11379   and output_function_epilogue() are not invoked.
11380
11381   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
11382   been extracted from it.)  It might possibly be useful on some
11383   targets, but probably not.
11384
11385   If you do not define this macro, the target-independent code in the
11386   C++ frontend will generate a less efficient heavyweight thunk that
11387   calls FUNCTION instead of jumping to it.  The generic approach does
11388   not support varargs.  */
11389
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
     tree function;
{
  /* NOTE: VCALL_OFFSET is ignored by this implementation; only the
     constant DELTA adjustment is applied to the `this' pointer.  */
  /* `this' arrives in r3, or r4 when the return value is passed by
     invisible reference (aggregate return).  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, (int) delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, (int) delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 (int) (delta >> 16));

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  function_section (current_function_decl);
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* r12 now holds the address of the function descriptor; load
	     the entry point (word 0), TOC pointer (word 1) and static
	     chain (word 2) from it, then jump through CTR.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
11549
11550/* A quick summary of the various types of 'constant-pool tables'
11551   under PowerPC:
11552
11553   Target	Flags		Name		One table per
11554   AIX		(none)		AIX TOC		object file
11555   AIX		-mfull-toc	AIX TOC		object file
11556   AIX		-mminimal-toc	AIX minimal TOC	translation unit
11557   SVR4/EABI	(none)		SVR4 SDATA	object file
11558   SVR4/EABI	-fpic		SVR4 pic	object file
11559   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
11560   SVR4/EABI	-mrelocatable	EABI TOC	function
11561   SVR4/EABI	-maix		AIX TOC		object file
11562   SVR4/EABI	-maix -mminimal-toc
11563				AIX minimal TOC	translation unit
11564
11565   Name			Reg.	Set by	entries	      contains:
11566					made by	 addrs?	fp?	sum?
11567
11568   AIX TOC		2	crt0	as	 Y	option	option
11569   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
11570   SVR4 SDATA		13	crt0	gcc	 N	Y	N
11571   SVR4 pic		30	prolog	ld	 Y	not yet	N
11572   SVR4 PIC		30	prolog	gcc	 Y	option	option
11573   EABI TOC		30	prolog	gcc	 Y	option	option
11574
11575*/
11576
11577/* Hash table stuff for keeping track of TOC entries.  */
11578
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Machine mode of KEY.  Entries with equal keys but different modes
     are distinct (see toc_hash_function and toc_hash_eq).  */
  enum machine_mode key_mode;
  /* Internal label number identifying this TOC entry -- presumably the
     LABELNO passed to output_toc; confirm against callers.  */
  int labelno;
};
11587
11588static htab_t toc_hash_table;
11589
11590/* Hash functions for the hash table.  */
11591
11592static unsigned
11593rs6000_hash_constant (k)
11594     rtx k;
11595{
11596  enum rtx_code code = GET_CODE (k);
11597  enum machine_mode mode = GET_MODE (k);
11598  unsigned result = (code << 3) ^ mode;
11599  const char *format;
11600  int flen, fidx;
11601
11602  format = GET_RTX_FORMAT (code);
11603  flen = strlen (format);
11604  fidx = 0;
11605
11606  switch (code)
11607    {
11608    case LABEL_REF:
11609      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11610
11611    case CONST_DOUBLE:
11612      if (mode != VOIDmode)
11613	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11614      flen = 2;
11615      break;
11616
11617    case CODE_LABEL:
11618      fidx = 3;
11619      break;
11620
11621    default:
11622      break;
11623    }
11624
11625  for (; fidx < flen; fidx++)
11626    switch (format[fidx])
11627      {
11628      case 's':
11629	{
11630	  unsigned i, len;
11631	  const char *str = XSTR (k, fidx);
11632	  len = strlen (str);
11633	  result = result * 613 + len;
11634	  for (i = 0; i < len; i++)
11635	    result = result * 613 + (unsigned) str[i];
11636	  break;
11637	}
11638      case 'u':
11639      case 'e':
11640	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11641	break;
11642      case 'i':
11643      case 'n':
11644	result = result * 613 + (unsigned) XINT (k, fidx);
11645	break;
11646      case 'w':
11647	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11648	  result = result * 613 + (unsigned) XWINT (k, fidx);
11649	else
11650	  {
11651	    size_t i;
11652	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11653	      result = result * 613 + (unsigned) (XWINT (k, fidx)
11654						  >> CHAR_BIT * i);
11655	  }
11656	break;
11657      default:
11658	abort ();
11659      }
11660
11661  return result;
11662}
11663
11664static unsigned
11665toc_hash_function (hash_entry)
11666     const void * hash_entry;
11667{
11668  const struct toc_hash_struct *thc =
11669    (const struct toc_hash_struct *) hash_entry;
11670  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11671}
11672
11673/* Compare H1 and H2 for equivalence.  */
11674
11675static int
11676toc_hash_eq (h1, h2)
11677     const void * h1;
11678     const void * h2;
11679{
11680  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11681  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11682
11683  if (((const struct toc_hash_struct *) h1)->key_mode
11684      != ((const struct toc_hash_struct *) h2)->key_mode)
11685    return 0;
11686
11687  return rtx_equal_p (r1, r2);
11688}
11689
11690/* Mark the hash table-entry HASH_ENTRY.  */
11691
11692static int
11693toc_hash_mark_entry (hash_slot, unused)
11694     void ** hash_slot;
11695     void * unused ATTRIBUTE_UNUSED;
11696{
11697  const struct toc_hash_struct * hash_entry =
11698    *(const struct toc_hash_struct **) hash_slot;
11699  rtx r = hash_entry->key;
11700  ggc_set_mark (hash_entry);
11701  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
11702  if (GET_CODE (r) == LABEL_REF)
11703    {
11704      ggc_set_mark (r);
11705      ggc_set_mark (XEXP (r, 0));
11706    }
11707  else
11708    ggc_mark_rtx (r);
11709  return 1;
11710}
11711
11712/* Mark all the elements of the TOC hash-table *HT.  */
11713
11714static void
11715toc_hash_mark_table (vht)
11716     void *vht;
11717{
11718  htab_t *ht = vht;
11719
11720  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11721}
11722
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NAME is any expression yielding a `const char *'; it matches the
   old-ABI "_vt." prefix and the new-ABI "_ZTV"/"_ZTT"/"_ZTC" mangled
   prefixes.  The macro previously referenced the variable `name'
   instead of its argument, so it only worked when the caller's local
   happened to be spelled that way; use the NAME parameter properly.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11733
/* Output to FILE a reference to the symbol X (a SYMBOL_REF), using the
   bare symbol name for vtables and the normal assembler name otherwise.
   NOTE(review): VTABLE_NAME_P has historically referenced the variable
   `name' directly rather than its macro argument, so this local must
   keep that exact spelling.  */

void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
11754
/* Output a TOC entry.  We derive the entry name from what is being
   written.  FILE is the assembly output stream, X the constant or
   address being placed in the TOC, LABELNO the number of the "LCn"
   label that tags the entry, and MODE the machine mode of X.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* The hash entry is GC-allocated; toc_hash_table keeps it live.  */
      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* Emit as two doubleword values (or a minimal-TOC literal).  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  /* 32-bit target: emit the four words individually.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: a single target word in l.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  /* Pad the single word out to a doubleword with zeros.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant.  Split it into a 32-bit low part and a
	 (sign-extended) high part, regardless of the host word size.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the constant within the Pmode-sized entry.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Two-word constant on a 32-bit target (e.g. DImode).  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      /* Single-word constant; only `low' is significant.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Otherwise X is an address: a SYMBOL_REF, LABEL_REF, CODE_LABEL,
     or a CONST wrapping (PLUS base offset).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode any offset into the TC symbol (.N for negative,
	 .P for positive) so distinct offsets get distinct entries.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12046
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char ch;
  int idx, chars_in_string;
  /* Prefix to emit before the next quoted character, prefix to emit
     before the next decimal byte, and text needed to terminate the
     directive currently open (NULL when nothing is open).  */
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *close_seq = NULL;

  chars_in_string = 0;
  for (idx = 0; idx < n; idx++)
    {
      ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Unprintable character: emit it as a decimal .byte operand.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  close_seq = "\n";
	  chars_in_string = 0;
	}
      else
	{
	  /* Printable character: place it inside a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* Write two quotes to get one.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++chars_in_string;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  close_seq = "\"\n";
	  ++chars_in_string;

	  /* The assembler truncates very long strings; break them up
	     well before that happens.  */
	  if (chars_in_string >= 512)
	    {
	      fputs (close_seq, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      close_seq = NULL;
	      chars_in_string = 0;
	    }
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (close_seq)
    fputs (close_seq, file);
}
12116
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the basename and the last period within it.
     LAST_PERIOD is reset at every slash so that a period inside a
     directory component (e.g. "dir.d/foo") does not count; previously
     such a period left LAST_PERIOD pointing outside the basename and
     SECTION_DESC was silently dropped from the generated name.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + basename + SECTION_DESC + trailing NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
	  /* Replace the last period with SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
        }

      else if (ISALNUM (*q))
        *p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC instead.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12171
/* Emit profile function.  Emits the RTL calls to mcount needed for
   -pg profiling on the ABIs that handle it here (AIX and Darwin);
   LABELNO numbers the per-function "LPn" counter label on AIX.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of this function's counter label to mcount.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's return address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12218
/* Write function profiler code.  Emits, as assembly text into FILE,
   the prologue sequence that calls mcount for the V.4 and
   AIX-without-descriptors ABIs; LABELNO numbers the "LPn" data label
   whose address is passed to mcount.  AIX and Darwin do this work in
   output_profile_hook instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset where the saved link register goes: 8 for the
     AIX-style frame, overridden to 4 for the V.4 frame below.  */
  int save_lr = 8;

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      /* Save the link register, materialize the address of the LP
	 label in r0, then call mcount.  How the label address is
	 formed depends on the PIC model.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small PIC: load the label address from the GOT via r12.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with a lis/la pair.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register around the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12303
12304/* Adjust the cost of a scheduling dependency.  Return the new cost of
12305   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
12306
12307static int
12308rs6000_adjust_cost (insn, link, dep_insn, cost)
12309     rtx insn;
12310     rtx link;
12311     rtx dep_insn ATTRIBUTE_UNUSED;
12312     int cost;
12313{
12314  if (! recog_memoized (insn))
12315    return 0;
12316
12317  if (REG_NOTE_KIND (link) != 0)
12318    return 0;
12319
12320  if (REG_NOTE_KIND (link) == 0)
12321    {
12322      /* Data dependency; DEP_INSN writes a register that INSN reads
12323	 some cycles later.  */
12324      switch (get_attr_type (insn))
12325	{
12326	case TYPE_JMPREG:
12327	  /* Tell the first scheduling pass about the latency between
12328	     a mtctr and bctr (and mtlr and br/blr).  The first
12329	     scheduling pass will not know about this latency since
12330	     the mtctr instruction, which has the latency associated
12331	     to it, will be generated by reload.  */
12332	  return TARGET_POWER ? 5 : 4;
12333	case TYPE_BRANCH:
12334	  /* Leave some extra cycles between a compare and its
12335	     dependent branch, to inhibit expensive mispredicts.  */
12336	  if ((rs6000_cpu_attr == CPU_PPC603
12337	       || rs6000_cpu_attr == CPU_PPC604
12338	       || rs6000_cpu_attr == CPU_PPC604E
12339	       || rs6000_cpu_attr == CPU_PPC620
12340	       || rs6000_cpu_attr == CPU_PPC630
12341	       || rs6000_cpu_attr == CPU_PPC750
12342	       || rs6000_cpu_attr == CPU_PPC7400
12343	       || rs6000_cpu_attr == CPU_PPC7450
12344	       || rs6000_cpu_attr == CPU_POWER4)
12345	      && recog_memoized (dep_insn)
12346	      && (INSN_CODE (dep_insn) >= 0)
12347	      && (get_attr_type (dep_insn) == TYPE_COMPARE
12348		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12349		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12350		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12351	    return cost + 2;
12352	default:
12353	  break;
12354	}
12355      /* Fall out to return default cost.  */
12356    }
12357
12358  return cost;
12359}
12360
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below is disabled; currently every insn keeps its
     original priority.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12404
12405/* Return how many instructions the machine can issue per cycle.  */
12406
12407static int
12408rs6000_issue_rate ()
12409{
12410  switch (rs6000_cpu_attr) {
12411  case CPU_RIOS1:  /* ? */
12412  case CPU_RS64A:
12413  case CPU_PPC601: /* ? */
12414  case CPU_PPC7450:
12415    return 3;
12416  case CPU_PPC603:
12417  case CPU_PPC750:
12418  case CPU_PPC7400:
12419    return 2;
12420  case CPU_RIOS2:
12421  case CPU_PPC604:
12422  case CPU_PPC604E:
12423  case CPU_PPC620:
12424  case CPU_PPC630:
12425  case CPU_POWER4:
12426    return 4;
12427  default:
12428    return 1;
12429  }
12430}
12431
12432
12433/* Length in units of the trampoline for entering a nested function.  */
12434
12435int
12436rs6000_trampoline_size ()
12437{
12438  int ret = 0;
12439
12440  switch (DEFAULT_ABI)
12441    {
12442    default:
12443      abort ();
12444
12445    case ABI_AIX:
12446      ret = (TARGET_32BIT) ? 12 : 24;
12447      break;
12448
12449    case ABI_DARWIN:
12450    case ABI_V4:
12451    case ABI_AIX_NODESC:
12452      ret = (TARGET_32BIT) ? 40 : 48;
12453      break;
12454    }
12455
12456  return ret;
12457}
12458
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline storage itself.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Size of one pointer-sized word in the descriptor.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the code address and TOC pointer out of FNADDR's own
	   descriptor, then append the static chain as the third word.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12511
12512
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Both attributes take no arguments, attach to types (not decls),
     require a function type, and share one validation handler.  */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }  /* Terminator.  */
};
12522
12523/* Handle a "longcall" or "shortcall" attribute; arguments as in
12524   struct attribute_spec.handler.  */
12525
12526static tree
12527rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12528     tree *node;
12529     tree name;
12530     tree args ATTRIBUTE_UNUSED;
12531     int flags ATTRIBUTE_UNUSED;
12532     bool *no_add_attrs;
12533{
12534  if (TREE_CODE (*node) != FUNCTION_TYPE
12535      && TREE_CODE (*node) != FIELD_DECL
12536      && TREE_CODE (*node) != TYPE_DECL)
12537    {
12538      warning ("`%s' attribute only applies to functions",
12539	       IDENTIFIER_POINTER (name));
12540      *no_add_attrs = true;
12541    }
12542
12543  return NULL_TREE;
12544}
12545
12546/* Set longcall attributes on all functions declared when
12547   rs6000_default_long_calls is true.  */
12548static void
12549rs6000_set_default_type_attributes (type)
12550     tree type;
12551{
12552  if (rs6000_default_long_calls
12553      && (TREE_CODE (type) == FUNCTION_TYPE
12554	  || TREE_CODE (type) == METHOD_TYPE))
12555    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12556					NULL_TREE,
12557					TYPE_ATTRIBUTES (type));
12558}
12559
12560/* Return a reference suitable for calling a function with the
12561   longcall attribute.  */
12562
12563struct rtx_def *
12564rs6000_longcall_ref (call_ref)
12565     rtx call_ref;
12566{
12567  const char *call_name;
12568  tree node;
12569
12570  if (GET_CODE (call_ref) != SYMBOL_REF)
12571    return call_ref;
12572
12573  /* System V adds '.' to the internal name, so skip them.  */
12574  call_name = XSTR (call_ref, 0);
12575  if (*call_name == '.')
12576    {
12577      while (*call_name == '.')
12578	call_name++;
12579
12580      node = get_identifier (call_name);
12581      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12582    }
12583
12584  return force_reg (Pmode, call_ref);
12585}
12586
12587
12588#ifdef USING_ELFOS_H
12589
12590/* A C statement or statements to switch to the appropriate section
12591   for output of RTX in mode MODE.  You can assume that RTX is some
12592   kind of constant in RTL.  The argument MODE is redundant except in
12593   the case of a `const_int' rtx.  Select the section by calling
12594   `text_section' or one of the alternatives for other sections.
12595
12596   Do not define this macro if you put all constants in the read-only
12597   data section.  */
12598
12599static void
12600rs6000_elf_select_rtx_section (mode, x, align)
12601     enum machine_mode mode;
12602     rtx x;
12603     unsigned HOST_WIDE_INT align;
12604{
12605  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12606    toc_section ();
12607  else
12608    default_elf_select_rtx_section (mode, x, align);
12609}
12610
12611/* A C statement or statements to switch to the appropriate
12612   section for output of DECL.  DECL is either a `VAR_DECL' node
12613   or a constant of some sort.  RELOC indicates whether forming
12614   the initial value of DECL requires link-time relocations.  */
12615
12616static void
12617rs6000_elf_select_section (decl, reloc, align)
12618     tree decl;
12619     int reloc;
12620     unsigned HOST_WIDE_INT align;
12621{
12622  default_elf_select_section_1 (decl, reloc, align,
12623				flag_pic || DEFAULT_ABI == ABI_AIX);
12624}
12625
12626/* A C statement to build up a unique section name, expressed as a
12627   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12628   RELOC indicates whether the initial value of EXP requires
12629   link-time relocations.  If you do not define this macro, GCC will use
12630   the symbol name prefixed by `.' as the section name.  Note - this
12631   macro can now be called for uninitialized data items as well as
12632   initialized data and functions.  */
12633
12634static void
12635rs6000_elf_unique_section (decl, reloc)
12636     tree decl;
12637     int reloc;
12638{
12639  default_unique_section_1 (decl, reloc,
12640			    flag_pic || DEFAULT_ABI == ABI_AIX);
12641}
12642
12643
12644/* If we are referencing a function that is static or is known to be
12645   in this file, make the SYMBOL_REF special.  We can use this to indicate
12646   that we can branch to this function without emitting a no-op after the
12647   call.  For real AIX calling sequences, we also replace the
12648   function name with the real name (1 or 2 leading .'s), rather than
12649   the function descriptor name.  This saves a lot of overriding code
12650   to read the prefixes.  */
12651
12652static void
12653rs6000_elf_encode_section_info (decl, first)
12654     tree decl;
12655     int first;
12656{
12657  if (!first)
12658    return;
12659
12660  if (TREE_CODE (decl) == FUNCTION_DECL)
12661    {
12662      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12663      if ((*targetm.binds_local_p) (decl))
12664	SYMBOL_REF_FLAG (sym_ref) = 1;
12665
12666      if (DEFAULT_ABI == ABI_AIX)
12667	{
12668	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12669	  size_t len2 = strlen (XSTR (sym_ref, 0));
12670	  char *str = alloca (len1 + len2 + 1);
12671	  str[0] = '.';
12672	  str[1] = '.';
12673	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12674
12675	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12676	}
12677    }
12678  else if (rs6000_sdata != SDATA_NONE
12679	   && DEFAULT_ABI == ABI_V4
12680	   && TREE_CODE (decl) == VAR_DECL)
12681    {
12682      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12683      int size = int_size_in_bytes (TREE_TYPE (decl));
12684      tree section_name = DECL_SECTION_NAME (decl);
12685      const char *name = (char *)0;
12686      int len = 0;
12687
12688      if ((*targetm.binds_local_p) (decl))
12689	SYMBOL_REF_FLAG (sym_ref) = 1;
12690
12691      if (section_name)
12692	{
12693	  if (TREE_CODE (section_name) == STRING_CST)
12694	    {
12695	      name = TREE_STRING_POINTER (section_name);
12696	      len = TREE_STRING_LENGTH (section_name);
12697	    }
12698	  else
12699	    abort ();
12700	}
12701
12702      if ((size > 0 && size <= g_switch_value)
12703	  || (name
12704	      && ((len == sizeof (".sdata") - 1
12705		   && strcmp (name, ".sdata") == 0)
12706		  || (len == sizeof (".sdata2") - 1
12707		      && strcmp (name, ".sdata2") == 0)
12708		  || (len == sizeof (".sbss") - 1
12709		      && strcmp (name, ".sbss") == 0)
12710		  || (len == sizeof (".sbss2") - 1
12711		      && strcmp (name, ".sbss2") == 0)
12712		  || (len == sizeof (".PPC.EMB.sdata0") - 1
12713		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
12714		  || (len == sizeof (".PPC.EMB.sbss0") - 1
12715		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
12716	{
12717	  size_t len = strlen (XSTR (sym_ref, 0));
12718	  char *str = alloca (len + 2);
12719
12720	  str[0] = '@';
12721	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12722	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12723	}
12724    }
12725}
12726
/* Strip any leading '*' or '@' encoding flags from STR and return a
   pointer to the first unencoded character.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  const char *p;

  for (p = str; ; p++)
    if (*p != '*' && *p != '@')
      return p;
}
12735
12736static bool
12737rs6000_elf_in_small_data_p (decl)
12738     tree decl;
12739{
12740  if (rs6000_sdata == SDATA_NONE)
12741    return false;
12742
12743  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12744    {
12745      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12746      if (strcmp (section, ".sdata") == 0
12747	  || strcmp (section, ".sdata2") == 0
12748	  || strcmp (section, ".sbss") == 0)
12749	return true;
12750    }
12751  else
12752    {
12753      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12754
12755      if (size > 0
12756	  && size <= g_switch_value
12757	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12758	return true;
12759    }
12760
12761  return false;
12762}
12763
12764#endif /* USING_ELFOS_H */
12765
12766
12767/* Return a REG that occurs in ADDR with coefficient 1.
12768   ADDR can be effectively incremented by incrementing REG.
12769
12770   r0 is special and we must not select it as an address
12771   register by this routine since our caller will try to
12772   increment the returned register via an "la" instruction.  */
12773
struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down the PLUS tree, preferring whichever operand is a
     register other than r0; if one operand is a constant, descend
     into the other.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	/* Neither operand is a usable register or a constant.  */
	abort ();
    }
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  /* ADDR contained no register with coefficient 1 other than r0.  */
  abort ();
}
12797
/* Report a fatal "bad address" error for insn operand OP.  */

void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
12804
12805/* Called to register all of our global variables with the garbage
12806   collector.  */
12807
static void
rs6000_add_gc_roots ()
{
  /* Create the TOC-entry hash table and register it as a GC root,
     using toc_hash_mark_table to mark its contents live.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);
}
12815
12816#if TARGET_MACHO
12817
12818#if 0
12819/* Returns 1 if OP is either a symbol reference or a sum of a symbol
12820   reference and a constant.  */
12821
12822int
12823symbolic_operand (op)
12824     rtx op;
12825{
12826  switch (GET_CODE (op))
12827    {
12828    case SYMBOL_REF:
12829    case LABEL_REF:
12830      return 1;
12831    case CONST:
12832      op = XEXP (op, 0);
12833      return (GET_CODE (op) == SYMBOL_REF ||
12834	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12835	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12836	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
12837    default:
12838      return 0;
12839    }
12840}
12841#endif
12842
12843#ifdef RS6000_LONG_BRANCH
12844
12845static tree stub_list = 0;
12846
12847/* ADD_COMPILER_STUB adds the compiler generated stub for handling
12848   procedure calls to the linked list.  */
12849
void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* Each stub is a TREE_LIST node: PURPOSE = function name,
     VALUE = stub label, TYPE = line number (see the STUB_* accessors
     below).  New stubs are pushed onto the front of stub_list.  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
12861
12862#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
12863#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
12864#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
12865
12866/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12867   handling procedure calls from the linked list and initializes the
12868   linked list.  */
12869
void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only emitted for non-PIC code.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	/* Emit the stub's label.  */
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means the name is already assembler-ready:
	   strip it.  Otherwise prepend a '_'.
	   NOTE(review): assumes the name fits in the 256-byte buffers;
	   no length check precedes the strcpy/strcat calls.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 with lis/ori and branch
	   through the count register.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* All pending stubs have been emitted; reset the list.  */
  stub_list = 0;
}
12913
12914/* NO_PREVIOUS_DEF checks in the link list whether the function name is
12915   already there or not.  */
12916
12917int
12918no_previous_def (function_name)
12919     tree function_name;
12920{
12921  tree stub;
12922  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12923    if (function_name == STUB_FUNCTION_NAME (stub))
12924      return 0;
12925  return 1;
12926}
12927
12928/* GET_PREV_LABEL gets the label name from the previous definition of
12929   the function.  */
12930
12931tree
12932get_prev_label (function_name)
12933     tree function_name;
12934{
12935  tree stub;
12936  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12937    if (function_name == STUB_FUNCTION_NAME (stub))
12938      return STUB_LABEL_NAME (stub);
12939  return 0;
12940}
12941
12942/* INSN is either a function call or a millicode call.  It may have an
12943   unconditional jump in its delay slot.
12944
12945   CALL_DEST is the routine we are calling.  */
12946
char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];	/* Returned to caller; overwritten per call.  */
  /* Direct calls under -mlong-branch without PIC go through a compiler
     stub: emit "jbsr" to the stub's label instead of a plain "bl".  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: generate a fresh stub label
	     and record it, with the line number from the nearest
	     preceding NOTE insn (0 if none is found).  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s caps the label length so the 256-byte BUF cannot
	 overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12986
12987#endif /* RS6000_LONG_BRANCH */
12988
/* Build in BUF the local label "LN$SYMBOL" (N is the numeric argument),
   preserving or adding quotes when SYMBOL is quoted or needs quoting.
   LENGTH is unused here; callers must size BUF themselves.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13006
13007
13008/* Generate PIC and indirect symbol stubs.  */
13009
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* Counter making each stub's local labels unique.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Build the stub's symbol, lazy-pointer and local label names.  The
     extra 32 bytes leave room for whatever the GEN_* macros prepend.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: obtain the pc in r11 via bcl/mflr, address the lazy
	 pointer pc-relatively with ha16/lo16 pairs, load the target
	 into r12 and branch through the count register.  The addi
	 after the lwz leaves r11 pointing at the lazy pointer itself.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself, initialized to the dyld binding
     helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13066
13067/* Legitimize PIC addresses.  If the address is already
13068   position-independent, we return ORIG.  Newly generated
13069   position-independent addresses go into a reg.  This is REG if non
13070   zero, otherwise we allocate register(s) as necessary.  */
13071
/* True iff X is a CONST_INT that fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register unless reload is running, when no new
     pseudos may be created.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already expressed relative to the PIC register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize both halves of the sum recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload, fall back to a constant-pool load.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13121
13122/* This is just a placeholder to make linking work without having to
13123   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
13124   ever needed for Darwin (not too likely!) this would have to get a
13125   real definition.  */
13126
void
toc_section ()
{
  /* Deliberately empty -- a link-time placeholder; see the comment
     above.  */
}
13131
13132#endif /* TARGET_MACHO */
13133
13134#if TARGET_ELF
13135static unsigned int
13136rs6000_elf_section_type_flags (decl, name, reloc)
13137     tree decl;
13138     const char *name;
13139     int reloc;
13140{
13141  unsigned int flags
13142    = default_section_type_flags_1 (decl, name, reloc,
13143				    flag_pic || DEFAULT_ABI == ABI_AIX);
13144
13145  if (TARGET_RELOCATABLE)
13146    flags |= SECTION_WRITE;
13147
13148  return flags;
13149}
13150
13151/* Record an element in the table of global constructors.  SYMBOL is
13152   a SYMBOL_REF of the function to be called; PRIORITY is a number
13153   between 0 and MAX_INIT_PRIORITY.
13154
13155   This differs from default_named_section_asm_out_constructor in
13156   that we have special handling for -mrelocatable.  */
13157
static void
rs6000_elf_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".ctors";
  char buf[16];	/* ".ctors." + priority digits + NUL; assumes the
		   priority prints as 5 digits -- TODO confirm
		   MAX_INIT_PRIORITY's range.  */

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  /* -mrelocatable needs an @fixup annotation so the pointer can be
     adjusted at run time; otherwise emit a plain pointer.  */
  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
13188
/* As rs6000_elf_asm_out_constructor, but for the .dtors table.  */

static void
rs6000_elf_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".dtors";
  char buf[16];	/* ".dtors." + priority digits + NUL; assumes the
		   priority prints as 5 digits -- TODO confirm
		   MAX_INIT_PRIORITY's range.  */

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  /* -mrelocatable needs an @fixup annotation so the pointer can be
     adjusted at run time; otherwise emit a plain pointer.  */
  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
13219#endif
13220
13221#if TARGET_XCOFF
13222static void
13223rs6000_xcoff_asm_globalize_label (stream, name)
13224     FILE *stream;
13225     const char *name;
13226{
13227  fputs (GLOBAL_ASM_OP, stream);
13228  RS6000_OUTPUT_BASENAME (stream, name);
13229  putc ('\n', stream);
13230}
13231
13232static void
13233rs6000_xcoff_asm_named_section (name, flags)
13234     const char *name;
13235     unsigned int flags;
13236{
13237  int smclass;
13238  static const char * const suffix[3] = { "PR", "RO", "RW" };
13239
13240  if (flags & SECTION_CODE)
13241    smclass = 0;
13242  else if (flags & SECTION_WRITE)
13243    smclass = 2;
13244  else
13245    smclass = 1;
13246
13247  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13248	   (flags & SECTION_CODE) ? "." : "",
13249	   name, suffix[smclass], flags & SECTION_ENTSIZE);
13250}
13251
13252static void
13253rs6000_xcoff_select_section (decl, reloc, align)
13254     tree decl;
13255     int reloc;
13256     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13257{
13258  if (decl_readonly_section_1 (decl, reloc, 1))
13259    {
13260      if (TREE_PUBLIC (decl))
13261        read_only_data_section ();
13262      else
13263        read_only_private_data_section ();
13264    }
13265  else
13266    {
13267      if (TREE_PUBLIC (decl))
13268        data_section ();
13269      else
13270        private_data_section ();
13271    }
13272}
13273
/* Give DECL a section of its own, named after the decl.  */

static void
rs6000_xcoff_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  /* Name the section after the decl's stripped assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
13294
13295/* Select section for constant in constant pool.
13296
13297   On RS/6000, all constants are in the private read-only data area.
13298   However, if this is being placed in the TOC it must be output as a
13299   toc entry.  */
13300
13301static void
13302rs6000_xcoff_select_rtx_section (mode, x, align)
13303     enum machine_mode mode;
13304     rtx x;
13305     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13306{
13307  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13308    toc_section ();
13309  else
13310    read_only_private_data_section ();
13311}
13312
13313/* Remove any trailing [DS] or the like from the symbol name.  */
13314
static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* Drop a leading '*' assembler-name marker.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  /* Strip a trailing "[DS]"-style mapping-class suffix.  Require at
     least four characters: the old code read name[len - 1] even when
     LEN was 0, and a shorter name ending in ']' would have made
     LEN - 4 wrap around.  NOTE(review): assumes the bracketed suffix
     is always exactly four characters -- confirm against the symbols
     this is applied to.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  return name;
}
13328
13329/* Section attributes.  AIX is always PIC.  */
13330
static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  /* Final argument 1: AIX is always PIC (see the comment above).  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Store log2 of the alignment in the SECTION_ENTSIZE bits; it is
     read back out by rs6000_xcoff_asm_named_section's .csect output.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
13351
13352#endif /* TARGET_XCOFF */
13353
13354/* Note that this is also used for PPC64 Linux.  */
13355
13356static void
13357rs6000_xcoff_encode_section_info (decl, first)
13358     tree decl;
13359     int first ATTRIBUTE_UNUSED;
13360{
13361  if (TREE_CODE (decl) == FUNCTION_DECL
13362      && (*targetm.binds_local_p) (decl))
13363    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13364}
13365
13366/* Cross-module name binding.  For AIX and PPC64 Linux, which always are
13367   PIC, use private copy of flag_pic.  */
13368
static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* Consult rs6000_flag_pic as well as flag_pic: per the comment
     above, it is the private copy kept for always-PIC targets.  */
  return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
}
13375
13376/* A C expression returning the cost of moving data from a register of class
13377   CLASS1 to one of CLASS2.  */
13378
int
rs6000_register_move_cost (mode, from, to)
     enum machine_mode mode;
     enum reg_class from, to;
{
  /*  Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      /* Normalize so FROM names the non-GPR side, if there is one.  */
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      /* FPR<->GPR and VR<->GPR transfers are priced as a store plus a
	 load through memory.  */
      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

/* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
      else if (from == CR_REGS)
	return 4;

      else
/* A move will cost one instruction per GPR moved.  */
	return 2 * HARD_REGNO_NREGS (0, mode);
    }

/* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

/* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
13413
13414/* A C expressions returning the cost of moving data of MODE from a register to
13415   or from memory.  */
13416
int
rs6000_memory_move_cost (mode, class, in)
  enum machine_mode mode;
  enum reg_class class;
  int in ATTRIBUTE_UNUSED;
{
  /* Cost scales with the number of hard registers MODE occupies in the
     class (regno 0 = first GPR, 32 = first FPR).  */
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * HARD_REGNO_NREGS (0, mode);
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * HARD_REGNO_NREGS (32, mode);
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
  else
    /* Any other class must be staged through GENERAL_REGS first.  */
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}
13432
13433