rs6000.c revision 132718
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6   This file is part of GCC.
7
8   GCC is free software; you can redistribute it and/or modify it
9   under the terms of the GNU General Public License as published
10   by the Free Software Foundation; either version 2, or (at your
11   option) any later version.
12
13   GCC is distributed in the hope that it will be useful, but WITHOUT
14   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16   License for more details.
17
18   You should have received a copy of the GNU General Public License
19   along with GCC; see the file COPYING.  If not, write to the
20   Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21   MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
36#include "obstack.h"
37#include "tree.h"
38#include "expr.h"
39#include "optabs.h"
40#include "except.h"
41#include "function.h"
42#include "output.h"
43#include "basic-block.h"
44#include "integrate.h"
45#include "toplev.h"
46#include "ggc.h"
47#include "hashtab.h"
48#include "tm_p.h"
49#include "target.h"
50#include "target-def.h"
51#include "langhooks.h"
52#include "reload.h"
53#include "cfglayout.h"
54#include "sched-int.h"
55#if TARGET_XCOFF
56#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
57#endif
58
/* Default TARGET_NO_PROTOTYPE off unless a subtarget header set it.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* True iff N fits in a 5-bit signed immediate (-16..15) — presumably
   the immediate range of the AltiVec vector-splat instructions; confirm
   against the uses of easy_vector_constant.  */
#define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
/* True iff N is an even value in 0x10..0x1e, so N/2 fits the 5-bit
   splat range and N can be materialized as a splat of N/2 followed by
   adding the register to itself.  */
#define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
                                          && !((n) & 1))

/* NOTE(review): classic double-evaluation macros — A and B must be
   side-effect free at every call site.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
69
/* Structure used to define the rs6000 stack frame layout.  Filled in
   by rs6000_stack_info (declared below); offsets are relative to the
   initial stack pointer, sizes are in bytes.  */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int toc_save_p;		/* true if the TOC needs to be saved */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int toc_save_offset;		/* offset to save the TOC pointer */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  int varargs_size;		/* size to hold V.4 args passed in regs */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int lr_size;			/* size to hold LR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;		/* alignment padding for the SPE GPR save
				   area — presumably mirrors
				   altivec_padding_size; confirm in
				   rs6000_stack_info */
  int toc_size;			/* size to hold TOC if not in save_size */
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;	/* NOTE(review): appears to flag/count use of
				   64-bit SPE GPRs — confirm exact semantics
				   in spe_func_has_64bit_regs_p callers */
} rs6000_stack_t;
112
/* Target cpu type */

enum processor_type rs6000_cpu;
/* Switches that can name a processor.  rs6000_override_options stores
   the user's strings into the .string fields (entry 0 gets the
   configure-time default cpu) and then, per entry, applies the matching
   processor_target_table row to the tuning model (set_tune_p) and/or
   architecture flags (set_arch_p).  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
123
124/* Always emit branch hint bits.  */
125static GTY(()) bool rs6000_always_hint;
126
127/* Schedule instructions for group formation.  */
128static GTY(()) bool rs6000_sched_groups;
129
130/* Support adjust_priority scheduler hook
131   and -mprioritize-restricted-insns= option.  */
132const char *rs6000_sched_restricted_insns_priority_str;
133int rs6000_sched_restricted_insns_priority;
134
135/* Support for -msched-costly-dep option.  */
136const char *rs6000_sched_costly_dep_str;
137enum rs6000_dependence_cost rs6000_sched_costly_dep;
138
139/* Support for -minsert-sched-nops option.  */
140const char *rs6000_sched_insert_nops_str;
141enum rs6000_nop_insertion rs6000_sched_insert_nops;
142
143/* Size of long double */
144const char *rs6000_long_double_size_string;
145int rs6000_long_double_type_size;
146
147/* Whether -mabi=altivec has appeared */
148int rs6000_altivec_abi;
149
150/* Whether VRSAVE instructions should be generated.  */
151int rs6000_altivec_vrsave;
152
153/* String from -mvrsave= option.  */
154const char *rs6000_altivec_vrsave_string;
155
156/* Nonzero if we want SPE ABI extensions.  */
157int rs6000_spe_abi;
158
159/* Whether isel instructions should be generated.  */
160int rs6000_isel;
161
162/* Whether SPE simd instructions should be generated.  */
163int rs6000_spe;
164
165/* Nonzero if floating point operations are done in the GPRs.  */
166int rs6000_float_gprs = 0;
167
168/* String from -mfloat-gprs=.  */
169const char *rs6000_float_gprs_string;
170
171/* String from -misel=.  */
172const char *rs6000_isel_string;
173
174/* String from -mspe=.  */
175const char *rs6000_spe_string;
176
177/* Set to nonzero once AIX common-mode calls have been defined.  */
178static GTY(()) int common_mode_defined;
179
180/* Save information from a "cmpxx" operation until the branch or scc is
181   emitted.  */
182rtx rs6000_compare_op0, rs6000_compare_op1;
183int rs6000_compare_fp_p;
184
185/* Label number of label created for -mrelocatable, to call to so we can
186   get the address of the GOT section */
187int rs6000_pic_labelno;
188
189#ifdef USING_ELFOS_H
190/* Which abi to adhere to */
191const char *rs6000_abi_name;
192
193/* Semantics of the small data area */
194enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195
196/* Which small data model to use */
197const char *rs6000_sdata_name = (char *)0;
198
199/* Counter for labels which are to be placed in .fixup.  */
200int fixuplabelno = 0;
201#endif
202
203/* Bit size of immediate TLS offsets and string from which it is decoded.  */
204int rs6000_tls_size = 32;
205const char *rs6000_tls_size_string;
206
207/* ABI enumeration available for subtarget to use.  */
208enum rs6000_abi rs6000_current_abi;
209
210/* ABI string from -mabi= option.  */
211const char *rs6000_abi_string;
212
213/* Debug flags */
214const char *rs6000_debug_name;
215int rs6000_debug_stack;		/* debug stack applications */
216int rs6000_debug_arg;		/* debug argument handling */
217
218/* Opaque types.  */
219static GTY(()) tree opaque_V2SI_type_node;
220static GTY(()) tree opaque_V2SF_type_node;
221static GTY(()) tree opaque_p_V2SI_type_node;
222
223const char *rs6000_traceback_name;
224static enum {
225  traceback_default = 0,
226  traceback_none,
227  traceback_part,
228  traceback_full
229} rs6000_traceback;
230
/* Flag to say the TOC is initialized */
int toc_initialized;
/* Buffer for the assembler label naming the TOC; 10 bytes including
   the NUL terminator — NOTE(review): confirm this bounds whatever
   label format the subtarget generates into it.  */
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static GTY(()) int rs6000_sr_alias_set;
237
238/* Call distance, overridden by -mlongcall and #pragma longcall(1).
239   The only place that looks at this is rs6000_set_default_type_attributes;
240   everywhere else should rely on the presence or absence of a longcall
241   attribute on the function declaration.  */
242int rs6000_default_long_calls;
243const char *rs6000_longcall_switch;
244
245/* Control alignment for fields within structures.  */
246/* String from -malign-XXXXX.  */
247const char *rs6000_alignment_string;
248int rs6000_alignment_flags;
249
/* Describes one target builtin: which target_flags bits must be on for
   it to exist (mask), the insn pattern that implements it (icode), its
   user-visible name, and its rs6000_builtins enum code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
260
261static bool rs6000_function_ok_for_sibcall (tree, tree);
262static int num_insns_constant_wide (HOST_WIDE_INT);
263static void validate_condition_mode (enum rtx_code, enum machine_mode);
264static rtx rs6000_generate_compare (enum rtx_code);
265static void rs6000_maybe_dead (rtx);
266static void rs6000_emit_stack_tie (void);
267static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
268static rtx spe_synthesize_frame_save (rtx);
269static bool spe_func_has_64bit_regs_p (void);
270static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
271			     int, HOST_WIDE_INT);
272static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
273static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
274static unsigned rs6000_hash_constant (rtx);
275static unsigned toc_hash_function (const void *);
276static int toc_hash_eq (const void *, const void *);
277static int constant_pool_expr_1 (rtx, int *, int *);
278static bool constant_pool_expr_p (rtx);
279static bool toc_relative_expr_p (rtx);
280static bool legitimate_small_data_p (enum machine_mode, rtx);
281static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
282static bool legitimate_indexed_address_p (rtx, int);
283static bool legitimate_indirect_address_p (rtx, int);
284static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
285static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
286static struct machine_function * rs6000_init_machine_status (void);
287static bool rs6000_assemble_integer (rtx, unsigned int, int);
288#ifdef HAVE_GAS_HIDDEN
289static void rs6000_assemble_visibility (tree, int);
290#endif
291static int rs6000_ra_ever_killed (void);
292static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
293extern const struct attribute_spec rs6000_attribute_table[];
294static void rs6000_set_default_type_attributes (tree);
295static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
296static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
297static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
298				    tree);
299static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
300static bool rs6000_return_in_memory (tree, tree);
301static void rs6000_file_start (void);
302#if TARGET_ELF
303static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
304static void rs6000_elf_asm_out_constructor (rtx, int);
305static void rs6000_elf_asm_out_destructor (rtx, int);
306static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
307static void rs6000_elf_unique_section (tree, int);
308static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
309					   unsigned HOST_WIDE_INT);
310static void rs6000_elf_encode_section_info (tree, rtx, int)
311     ATTRIBUTE_UNUSED;
312static bool rs6000_elf_in_small_data_p (tree);
313#endif
314#if TARGET_XCOFF
315static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
316static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
317static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
318static void rs6000_xcoff_unique_section (tree, int);
319static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
320					     unsigned HOST_WIDE_INT);
321static const char * rs6000_xcoff_strip_name_encoding (const char *);
322static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
323static void rs6000_xcoff_file_start (void);
324static void rs6000_xcoff_file_end (void);
325#endif
326#if TARGET_MACHO
327static bool rs6000_binds_local_p (tree);
328#endif
329static int rs6000_use_dfa_pipeline_interface (void);
330static int rs6000_variable_issue (FILE *, int, rtx, int);
331static bool rs6000_rtx_costs (rtx, int, int, int *);
332static int rs6000_adjust_cost (rtx, rtx, rtx, int);
333static bool is_microcoded_insn (rtx);
334static int is_dispatch_slot_restricted (rtx);
335static bool is_cracked_insn (rtx);
336static bool is_branch_slot_insn (rtx);
337static int rs6000_adjust_priority (rtx, int);
338static int rs6000_issue_rate (void);
339static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
340static rtx get_next_active_insn (rtx, rtx);
341static bool insn_terminates_group_p (rtx , enum group_termination);
342static bool is_costly_group (rtx *, rtx);
343static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
344static int redefine_groups (FILE *, int, rtx, rtx);
345static int pad_groups (FILE *, int, rtx, rtx);
346static void rs6000_sched_finish (FILE *, int);
347static int rs6000_use_sched_lookahead (void);
348
349static void rs6000_init_builtins (void);
350static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
351static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
352static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
353static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
354static void altivec_init_builtins (void);
355static void rs6000_common_init_builtins (void);
356static void rs6000_init_libfuncs (void);
357
358static void enable_mask_for_builtins (struct builtin_description *, int,
359				      enum rs6000_builtins,
360				      enum rs6000_builtins);
361static void spe_init_builtins (void);
362static rtx spe_expand_builtin (tree, rtx, bool *);
363static rtx spe_expand_stv_builtin (enum insn_code, tree);
364static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
365static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
366static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
367static rs6000_stack_t *rs6000_stack_info (void);
368static void debug_stack_info (rs6000_stack_t *);
369
370static rtx altivec_expand_builtin (tree, rtx, bool *);
371static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
372static rtx altivec_expand_st_builtin (tree, rtx, bool *);
373static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
374static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
375static rtx altivec_expand_predicate_builtin (enum insn_code,
376					    const char *, tree, rtx);
377static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
378static rtx altivec_expand_stv_builtin (enum insn_code, tree);
379static void rs6000_parse_abi_options (void);
380static void rs6000_parse_alignment_option (void);
381static void rs6000_parse_tls_size_option (void);
382static void rs6000_parse_yes_no_option (const char *, const char *, int *);
383static int first_altivec_reg_to_save (void);
384static unsigned int compute_vrsave_mask (void);
385static void is_altivec_return_reg (rtx, void *);
386static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
387int easy_vector_constant (rtx, enum machine_mode);
388static int easy_vector_same (rtx, enum machine_mode);
389static int easy_vector_splat_const (int, enum machine_mode);
390static bool is_ev64_opaque_type (tree);
391static rtx rs6000_dwarf_register_span (rtx);
392static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
393static rtx rs6000_tls_get_addr (void);
394static rtx rs6000_got_sym (void);
395static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
396static const char *rs6000_get_some_local_dynamic_name (void);
397static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
398static rtx rs6000_complex_function_value (enum machine_mode);
399static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
400				    enum machine_mode, tree);
401static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
402				      enum machine_mode, tree, int);
403static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
404static void setup_incoming_varargs (CUMULATIVE_ARGS *,
405				    enum machine_mode, tree,
406				    int *, int);
407#if TARGET_MACHO
408static void macho_branch_islands (void);
409static void add_compiler_branch_island (tree, tree, int);
410static int no_previous_def (tree function_name);
411static tree get_prev_label (tree function_name);
412#endif
413
414static tree rs6000_build_builtin_va_list (void);
415
416/* Hash table stuff for keeping track of TOC entries.  */
417
/* One entry in the TOC hash table: a constant (key, in mode key_mode)
   together with the label number of the TOC slot already emitted for
   it, so identical constants share one slot.  */
struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

/* GC-rooted hash table of the toc_hash_struct entries above.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
428
429/* Default register names.  */
/* Default register names, indexed by hard register number.  Order:
   32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, xer, 32 AltiVec
   registers, vrsave/vscr, then the SPE accumulator and spefscr.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
452
#ifdef TARGET_REGNAMES
/* Alternate (prefixed) register names used when TARGET_REGNAMES is
   defined; must stay index-for-index parallel with rs6000_reg_names
   above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
477
/* Provide zero defaults for subtarget macros that may be undefined.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Return 1 for a symbol ref for a thread-local storage symbol.  */
#define RS6000_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
491
492/* Initialize the GCC target structure.  */
493#undef TARGET_ATTRIBUTE_TABLE
494#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
495#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
496#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
497
498#undef TARGET_ASM_ALIGNED_DI_OP
499#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
500
501/* Default unaligned ops are only provided for ELF.  Find the ops needed
502   for non-ELF systems.  */
503#ifndef OBJECT_FORMAT_ELF
504#if TARGET_XCOFF
505/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
506   64-bit targets.  */
507#undef TARGET_ASM_UNALIGNED_HI_OP
508#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
509#undef TARGET_ASM_UNALIGNED_SI_OP
510#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
511#undef TARGET_ASM_UNALIGNED_DI_OP
512#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
513#else
514/* For Darwin.  */
515#undef TARGET_ASM_UNALIGNED_HI_OP
516#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
517#undef TARGET_ASM_UNALIGNED_SI_OP
518#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
519#endif
520#endif
521
522/* This hook deals with fixups for relocatable code and DI-mode objects
523   in 64-bit code.  */
524#undef TARGET_ASM_INTEGER
525#define TARGET_ASM_INTEGER rs6000_assemble_integer
526
527#ifdef HAVE_GAS_HIDDEN
528#undef TARGET_ASM_ASSEMBLE_VISIBILITY
529#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
530#endif
531
532#undef TARGET_HAVE_TLS
533#define TARGET_HAVE_TLS HAVE_AS_TLS
534
535#undef TARGET_CANNOT_FORCE_CONST_MEM
536#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
537
538#undef TARGET_ASM_FUNCTION_PROLOGUE
539#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
540#undef TARGET_ASM_FUNCTION_EPILOGUE
541#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
542
543#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
544#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
545#undef  TARGET_SCHED_VARIABLE_ISSUE
546#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
547
548#undef TARGET_SCHED_ISSUE_RATE
549#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
550#undef TARGET_SCHED_ADJUST_COST
551#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
552#undef TARGET_SCHED_ADJUST_PRIORITY
553#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
554#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
555#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
556#undef TARGET_SCHED_FINISH
557#define TARGET_SCHED_FINISH rs6000_sched_finish
558
559#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
560#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
561
562#undef TARGET_INIT_BUILTINS
563#define TARGET_INIT_BUILTINS rs6000_init_builtins
564
565#undef TARGET_EXPAND_BUILTIN
566#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
567
568#undef TARGET_INIT_LIBFUNCS
569#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
570
571#if TARGET_MACHO
572#undef TARGET_BINDS_LOCAL_P
573#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
574#endif
575
576#undef TARGET_ASM_OUTPUT_MI_THUNK
577#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
578
579#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
580#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
581
582#undef TARGET_FUNCTION_OK_FOR_SIBCALL
583#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
584
585#undef TARGET_RTX_COSTS
586#define TARGET_RTX_COSTS rs6000_rtx_costs
587#undef TARGET_ADDRESS_COST
588#define TARGET_ADDRESS_COST hook_int_rtx_0
589
590#undef TARGET_VECTOR_OPAQUE_P
591#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
592
593#undef TARGET_DWARF_REGISTER_SPAN
594#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
595
596/* On rs6000, function arguments are promoted, as are function return
597   values.  */
598#undef TARGET_PROMOTE_FUNCTION_ARGS
599#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
600#undef TARGET_PROMOTE_FUNCTION_RETURN
601#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
602
603/* Structure return values are passed as an extra parameter.  */
604#undef TARGET_STRUCT_VALUE_RTX
605#define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
606
607#undef TARGET_RETURN_IN_MEMORY
608#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
609
610#undef TARGET_SETUP_INCOMING_VARARGS
611#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
612
613/* Always strict argument naming on rs6000.  */
614#undef TARGET_STRICT_ARGUMENT_NAMING
615#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
616#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
617#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
618#undef TARGET_SPLIT_COMPLEX_ARG
619#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
620
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

/* Instantiate the target hook vector from the TARGET_* macros
   redefined above; this is the back end's entry point table.  */
struct gcc_target targetm = TARGET_INITIALIZER;
625
626/* Override command line options.  Mostly we process the processor
627   type and sometimes adjust other TARGET_ options.  */
628
629void
630rs6000_override_options (const char *default_cpu)
631{
632  size_t i, j;
633  struct rs6000_cpu_select *ptr;
634  int set_masks;
635
636  /* Simplifications for entries below.  */
637
638  enum {
639    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
640    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
641  };
642
643  /* This table occasionally claims that a processor does not support
644     a particular feature even though it does, but the feature is slower
645     than the alternative.  Thus, it shouldn't be relied on as a
646     complete description of the processor's support.
647
648     Please keep this list in order, and don't forget to update the
649     documentation in invoke.texi when adding a new processor or
650     flag.  */
651  static struct ptt
652    {
653      const char *const name;		/* Canonical processor name.  */
654      const enum processor_type processor; /* Processor type enum value.  */
655      const int target_enable;	/* Target flags to enable.  */
656    } const processor_target_table[]
657      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
658	 {"403", PROCESSOR_PPC403,
659	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
660	 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
661	 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
662	 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
663	 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
664	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
665	 {"601", PROCESSOR_PPC601,
666	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
667	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
668	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
669	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
670	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
671	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
672	 {"620", PROCESSOR_PPC620,
673	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
674	 {"630", PROCESSOR_PPC630,
675	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
676	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
677	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
678	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
679	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
680	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
681	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
682	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
683	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
684	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
685	 {"970", PROCESSOR_POWER4,
686	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
687	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
688	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
689	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690	 {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
691	 {"G5", PROCESSOR_POWER4,
692	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
693	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
694	 {"power2", PROCESSOR_POWER,
695	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
696	 {"power3", PROCESSOR_PPC630,
697	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
698	 {"power4", PROCESSOR_POWER4,
699	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
700	 {"power5", PROCESSOR_POWER5,
701	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
702	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
703	 {"powerpc64", PROCESSOR_POWERPC64,
704	  POWERPC_BASE_MASK | MASK_POWERPC64},
705	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
706	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
707	 {"rios2", PROCESSOR_RIOS2,
708	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
709	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
710	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
711	 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
712      };
713
714  const size_t ptt_size = ARRAY_SIZE (processor_target_table);
715
716  /* Some OSs don't support saving the high part of 64-bit registers on
717     context switch.  Other OSs don't support saving Altivec registers.
718     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
719     settings; if the user wants either, the user must explicitly specify
720     them and we won't interfere with the user's specification.  */
721
722  enum {
723    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
724    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
725		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
726		     | MASK_MFCRF)
727  };
728 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
729#ifdef OS_MISSING_POWERPC64
730  if (OS_MISSING_POWERPC64)
731    set_masks &= ~MASK_POWERPC64;
732#endif
733#ifdef OS_MISSING_ALTIVEC
734  if (OS_MISSING_ALTIVEC)
735    set_masks &= ~MASK_ALTIVEC;
736#endif
737
738  /* Don't override these by the processor default if given explicitly.  */
739  set_masks &= ~(target_flags_explicit
740		 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
741
742  /* Identify the processor type.  */
743  rs6000_select[0].string = default_cpu;
744  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
745
746  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
747    {
748      ptr = &rs6000_select[i];
749      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
750	{
751	  for (j = 0; j < ptt_size; j++)
752	    if (! strcmp (ptr->string, processor_target_table[j].name))
753	      {
754		if (ptr->set_tune_p)
755		  rs6000_cpu = processor_target_table[j].processor;
756
757		if (ptr->set_arch_p)
758		  {
759		    target_flags &= ~set_masks;
760		    target_flags |= (processor_target_table[j].target_enable
761				     & set_masks);
762		  }
763		break;
764	      }
765
766	  if (j == ptt_size)
767	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
768	}
769    }
770
771  if (TARGET_E500)
772    rs6000_isel = 1;
773
774  /* If we are optimizing big endian systems for space, use the load/store
775     multiple and string instructions.  */
776  if (BYTES_BIG_ENDIAN && optimize_size)
777    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
778
779  /* Don't allow -mmultiple or -mstring on little endian systems
780     unless the cpu is a 750, because the hardware doesn't support the
781     instructions used in little endian mode, and causes an alignment
782     trap.  The 750 does not cause an alignment trap (except when the
783     target is unaligned).  */
784
785  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
786    {
787      if (TARGET_MULTIPLE)
788	{
789	  target_flags &= ~MASK_MULTIPLE;
790	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
791	    warning ("-mmultiple is not supported on little endian systems");
792	}
793
794      if (TARGET_STRING)
795	{
796	  target_flags &= ~MASK_STRING;
797	  if ((target_flags_explicit & MASK_STRING) != 0)
798	    warning ("-mstring is not supported on little endian systems");
799	}
800    }
801
802  /* Set debug flags */
803  if (rs6000_debug_name)
804    {
805      if (! strcmp (rs6000_debug_name, "all"))
806	rs6000_debug_stack = rs6000_debug_arg = 1;
807      else if (! strcmp (rs6000_debug_name, "stack"))
808	rs6000_debug_stack = 1;
809      else if (! strcmp (rs6000_debug_name, "arg"))
810	rs6000_debug_arg = 1;
811      else
812	error ("unknown -mdebug-%s switch", rs6000_debug_name);
813    }
814
815  if (rs6000_traceback_name)
816    {
817      if (! strncmp (rs6000_traceback_name, "full", 4))
818	rs6000_traceback = traceback_full;
819      else if (! strncmp (rs6000_traceback_name, "part", 4))
820	rs6000_traceback = traceback_part;
821      else if (! strncmp (rs6000_traceback_name, "no", 2))
822	rs6000_traceback = traceback_none;
823      else
824	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
825	       rs6000_traceback_name);
826    }
827
828  /* Set size of long double */
829  rs6000_long_double_type_size = 64;
830  if (rs6000_long_double_size_string)
831    {
832      char *tail;
833      int size = strtol (rs6000_long_double_size_string, &tail, 10);
834      if (*tail != '\0' || (size != 64 && size != 128))
835	error ("Unknown switch -mlong-double-%s",
836	       rs6000_long_double_size_string);
837      else
838	rs6000_long_double_type_size = size;
839    }
840
841  /* Set Altivec ABI as default for powerpc64 linux.  */
842  if (TARGET_ELF && TARGET_64BIT)
843    {
844      rs6000_altivec_abi = 1;
845      rs6000_altivec_vrsave = 1;
846    }
847
848  /* Handle -mabi= options.  */
849  rs6000_parse_abi_options ();
850
851  /* Handle -malign-XXXXX option.  */
852  rs6000_parse_alignment_option ();
853
854  /* Handle generic -mFOO=YES/NO options.  */
855  rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
856			      &rs6000_altivec_vrsave);
857  rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
858			      &rs6000_isel);
859  rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
860  rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
861			      &rs6000_float_gprs);
862
863  /* Handle -mtls-size option.  */
864  rs6000_parse_tls_size_option ();
865
866#ifdef SUBTARGET_OVERRIDE_OPTIONS
867  SUBTARGET_OVERRIDE_OPTIONS;
868#endif
869#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
870  SUBSUBTARGET_OVERRIDE_OPTIONS;
871#endif
872
873  if (TARGET_E500)
874    {
875      if (TARGET_ALTIVEC)
876      error ("AltiVec and E500 instructions cannot coexist");
877
878      /* The e500 does not have string instructions, and we set
879	 MASK_STRING above when optimizing for size.  */
880      if ((target_flags & MASK_STRING) != 0)
881	target_flags = target_flags & ~MASK_STRING;
882
883      /* No SPE means 64-bit long doubles, even if an E500.  */
884      if (rs6000_spe_string != 0
885          && !strcmp (rs6000_spe_string, "no"))
886	rs6000_long_double_type_size = 64;
887    }
888  else if (rs6000_select[1].string != NULL)
889    {
890      /* For the powerpc-eabispe configuration, we set all these by
891	 default, so let's unset them if we manually set another
892	 CPU that is not the E500.  */
893      if (rs6000_abi_string == 0)
894	rs6000_spe_abi = 0;
895      if (rs6000_spe_string == 0)
896	rs6000_spe = 0;
897      if (rs6000_float_gprs_string == 0)
898	rs6000_float_gprs = 0;
899      if (rs6000_isel_string == 0)
900	rs6000_isel = 0;
901      if (rs6000_long_double_size_string == 0)
902	rs6000_long_double_type_size = 64;
903    }
904
905  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
906			&& rs6000_cpu != PROCESSOR_POWER5);
907  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
908			 || rs6000_cpu == PROCESSOR_POWER5);
909
910  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
911     using TARGET_OPTIONS to handle a toggle switch, but we're out of
912     bits in target_flags so TARGET_SWITCHES cannot be used.
913     Assumption here is that rs6000_longcall_switch points into the
914     text of the complete option, rather than being a copy, so we can
915     scan back for the presence or absence of the no- modifier.  */
916  if (rs6000_longcall_switch)
917    {
918      const char *base = rs6000_longcall_switch;
919      while (base[-1] != 'm') base--;
920
921      if (*rs6000_longcall_switch != '\0')
922	error ("invalid option `%s'", base);
923      rs6000_default_long_calls = (base[0] != 'n');
924    }
925
926  /* Handle -mprioritize-restricted-insns option.  */
927  rs6000_sched_restricted_insns_priority
928    = (rs6000_sched_groups ? 1 : 0);
929  if (rs6000_sched_restricted_insns_priority_str)
930    rs6000_sched_restricted_insns_priority =
931      atoi (rs6000_sched_restricted_insns_priority_str);
932
933  /* Handle -msched-costly-dep option.  */
934  rs6000_sched_costly_dep
935    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
936  if (rs6000_sched_costly_dep_str)
937    {
938      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
939        rs6000_sched_costly_dep = no_dep_costly;
940      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
941        rs6000_sched_costly_dep = all_deps_costly;
942      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
943        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
944      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
945        rs6000_sched_costly_dep = store_to_load_dep_costly;
946      else
947        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
948    }
949
950  /* Handle -minsert-sched-nops option.  */
951  rs6000_sched_insert_nops
952    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
953  if (rs6000_sched_insert_nops_str)
954    {
955      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
956        rs6000_sched_insert_nops = sched_finish_none;
957      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
958        rs6000_sched_insert_nops = sched_finish_pad_groups;
959      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
960        rs6000_sched_insert_nops = sched_finish_regroup_exact;
961      else
962        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
963    }
964
965#ifdef TARGET_REGNAMES
966  /* If the user desires alternate register names, copy in the
967     alternate names now.  */
968  if (TARGET_REGNAMES)
969    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
970#endif
971
972  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
973     If -maix-struct-return or -msvr4-struct-return was explicitly
974     used, don't override with the ABI default.  */
975  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
976    {
977      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
978	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
979      else
980	target_flags |= MASK_AIX_STRUCT_RET;
981    }
982
983  if (TARGET_LONG_DOUBLE_128
984      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
985    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
986
987  /* Allocate an alias set for register saves & restores from stack.  */
988  rs6000_sr_alias_set = new_alias_set ();
989
990  if (TARGET_TOC)
991    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
992
993  /* We can only guarantee the availability of DI pseudo-ops when
994     assembling for 64-bit targets.  */
995  if (!TARGET_64BIT)
996    {
997      targetm.asm_out.aligned_op.di = NULL;
998      targetm.asm_out.unaligned_op.di = NULL;
999    }
1000
1001  /* Set maximum branch target alignment at two instructions, eight bytes.  */
1002  align_jumps_max_skip = 8;
1003  align_loops_max_skip = 8;
1004
1005  /* Arrange to save and restore machine status around nested functions.  */
1006  init_machine_status = rs6000_init_machine_status;
1007
1008  /* We should always be splitting complex arguments, but we can't break
1009     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
1010  if (DEFAULT_ABI != ABI_AIX)
1011    targetm.calls.split_complex_arg = NULL;
1012}
1013
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* A null VALUE means the option was never given; leave the flag at
     its current setting.  */
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1031
/* Handle -mabi= options.  Parses the global rs6000_abi_string and
   updates the rs6000_altivec_abi / rs6000_spe_abi flags.  Reports an
   error for an unrecognized or unsupported ABI name.  */
static void
rs6000_parse_abi_options (void)
{
  /* No -mabi= on the command line; keep the defaults chosen earlier.  */
  if (rs6000_abi_string == 0)
    return;
  else if (! strcmp (rs6000_abi_string, "altivec"))
    {
      /* The AltiVec and SPE ABIs are mutually exclusive, so selecting
	 one clears the other.  */
      rs6000_altivec_abi = 1;
      rs6000_spe_abi = 0;
    }
  else if (! strcmp (rs6000_abi_string, "no-altivec"))
    rs6000_altivec_abi = 0;
  else if (! strcmp (rs6000_abi_string, "spe"))
    {
      rs6000_spe_abi = 1;
      rs6000_altivec_abi = 0;
      /* Complain if this compiler was not configured with SPE ABI
	 support; the flags are set regardless so later code sees a
	 consistent state.  */
      if (!TARGET_SPE_ABI)
	error ("not configured for ABI: '%s'", rs6000_abi_string);
    }

  else if (! strcmp (rs6000_abi_string, "no-spe"))
    rs6000_spe_abi = 0;
  else
    error ("unknown ABI specified: '%s'", rs6000_abi_string);
}
1058
1059/* Handle -malign-XXXXXX options.  */
1060static void
1061rs6000_parse_alignment_option (void)
1062{
1063  if (rs6000_alignment_string == 0)
1064    return;
1065  else if (! strcmp (rs6000_alignment_string, "power"))
1066    rs6000_alignment_flags = MASK_ALIGN_POWER;
1067  else if (! strcmp (rs6000_alignment_string, "natural"))
1068    rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1069  else
1070    error ("unknown -malign-XXXXX option specified: '%s'",
1071	   rs6000_alignment_string);
1072}
1073
1074/* Validate and record the size specified with the -mtls-size option.  */
1075
1076static void
1077rs6000_parse_tls_size_option (void)
1078{
1079  if (rs6000_tls_size_string == 0)
1080    return;
1081  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1082    rs6000_tls_size = 16;
1083  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1084    rs6000_tls_size = 32;
1085  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1086    rs6000_tls_size = 64;
1087  else
1088    error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1089}
1090
/* Hook called to adjust options per optimization LEVEL and -Os (SIZE).
   Intentionally empty: this port makes no optimization-level-specific
   adjustments here.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
1095
/* Do anything needed at the start of the asm file.  Emits the default
   file prologue and, under -fverbose-asm, a comment listing the
   rs6000/powerpc options in effect.  */

static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  /* START points at the header comment until something has been
     printed; it is then reset to "" so the header appears at most
     once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On bi-arch configurations, suppress the default cpu name if the
     selected word size differs from the configured default.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Print each -mcpu/-mtune style selection that was given.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      /* Report the small-data model and -G threshold on ELF targets.  */
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the comment line, but only if something was
	 actually printed (START was cleared).  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1151
1152/* Return nonzero if this function is known to have a null epilogue.  */
1153
1154int
1155direct_return (void)
1156{
1157  if (reload_completed)
1158    {
1159      rs6000_stack_t *info = rs6000_stack_info ();
1160
1161      if (info->first_gp_reg_save == 32
1162	  && info->first_fp_reg_save == 64
1163	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1164	  && ! info->lr_save_p
1165	  && ! info->cr_save_p
1166	  && info->vrsave_mask == 0
1167	  && ! info->push_p)
1168	return 1;
1169    }
1170
1171  return 0;
1172}
1173
/* Returns 1 always.  A catch-all predicate that accepts any rtx in
   any mode.  */

int
any_operand (rtx op ATTRIBUTE_UNUSED,
	     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return 1;
}
1182
1183/* Returns 1 if op is the count register.  */
1184int
1185count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1186{
1187  if (GET_CODE (op) != REG)
1188    return 0;
1189
1190  if (REGNO (op) == COUNT_REGISTER_REGNUM)
1191    return 1;
1192
1193  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1194    return 1;
1195
1196  return 0;
1197}
1198
1199/* Returns 1 if op is an altivec register.  */
1200int
1201altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1202{
1203
1204  return (register_operand (op, mode)
1205	  && (GET_CODE (op) != REG
1206	      || REGNO (op) > FIRST_PSEUDO_REGISTER
1207	      || ALTIVEC_REGNO_P (REGNO (op))));
1208}
1209
1210int
1211xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1212{
1213  if (GET_CODE (op) != REG)
1214    return 0;
1215
1216  if (XER_REGNO_P (REGNO (op)))
1217    return 1;
1218
1219  return 0;
1220}
1221
1222/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
1223   by such constants completes more quickly.  */
1224
1225int
1226s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1227{
1228  return ( GET_CODE (op) == CONST_INT
1229	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1230}
1231
/* Return 1 if OP is a constant that can fit in a D field (the
   'I' constraint: a signed 16-bit immediate, per the addi comment in
   num_insns_constant_wide).  */

int
short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
}
1240
/* Similar for an unsigned D field (the 'K' constraint).  */

int
u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Note that MODE is in fact used here, despite the ATTRIBUTE_UNUSED
     marker: the value is masked to the operand's width before the
     range check.  */
  return (GET_CODE (op) == CONST_INT
	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
}
1249
/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */

int
non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Biasing by 0x8000 maps the in-range values [-0x8000, 0x7fff] onto
     [0, 0xffff], so a single unsigned comparison detects any value
     outside the signed 16-bit range.  */
  return (GET_CODE (op) == CONST_INT
	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
}
1258
1259/* Returns 1 if OP is a CONST_INT that is a positive value
1260   and an exact power of 2.  */
1261
1262int
1263exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1264{
1265  return (GET_CODE (op) == CONST_INT
1266	  && INTVAL (op) > 0
1267	  && exact_log2 (INTVAL (op)) >= 0);
1268}
1269
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      /* Registers numbered from ARG_POINTER_REGNUM up (which
		 includes all pseudos) are acceptable, except the XER,
		 whose numbers evidently lie in that range.  */
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      /* Hard registers below MQ_REGNO are the ordinary
		 integer/FP registers.  */
	      || REGNO (op) < MQ_REGNO));
}
1282
1283/* Returns 1 if OP is either a pseudo-register or a register denoting a
1284   CR field.  */
1285
1286int
1287cc_reg_operand (rtx op, enum machine_mode mode)
1288{
1289  return (register_operand (op, mode)
1290	  && (GET_CODE (op) != REG
1291	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1292	      || CR_REGNO_P (REGNO (op))));
1293}
1294
/* Returns 1 if OP is either a pseudo-register or a register denoting a
   CR field that isn't CR0.  */

int
cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  /* Accept non-REGs that register_operand liked, any pseudo,
	     or a hard CR register other than CR0.  */
	  && (GET_CODE (op) != REG
	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
}
1306
1307/* Returns 1 if OP is either a constant integer valid for a D-field or
1308   a non-special register.  If a register, it must be in the proper
1309   mode unless MODE is VOIDmode.  */
1310
1311int
1312reg_or_short_operand (rtx op, enum machine_mode mode)
1313{
1314  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1315}
1316
/* Similar, except check if the negation of the constant would be
   valid for a D-field.  Don't allow a constant zero, since all the
   patterns that call this predicate use "addic r1,r2,-constant" on
   a constant value to set a carry when r2 is greater or equal to
   "constant".  That doesn't work for zero.  */

int
reg_or_neg_short_operand (rtx op, enum machine_mode mode)
{
  /* The 'P' constraint letter embodies the "negation fits in a
     D-field" test; zero is excluded explicitly per the comment
     above.  */
  if (GET_CODE (op) == CONST_INT)
    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;

  return gpc_reg_operand (op, mode);
}
1331
1332/* Returns 1 if OP is either a constant integer valid for a DS-field or
1333   a non-special register.  If a register, it must be in the proper
1334   mode unless MODE is VOIDmode.  */
1335
1336int
1337reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1338{
1339  if (gpc_reg_operand (op, mode))
1340    return 1;
1341  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1342    return 1;
1343
1344  return 0;
1345}
1346
1347
/* Return 1 if the operand is either a register or an integer whose
   high-order 16 bits are zero.  */

int
reg_or_u_short_operand (rtx op, enum machine_mode mode)
{
  /* Both sub-predicates are defined above in this file.  */
  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
}
1356
1357/* Return 1 is the operand is either a non-special register or ANY
1358   constant integer.  */
1359
1360int
1361reg_or_cint_operand (rtx op, enum machine_mode mode)
1362{
1363  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1364}
1365
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* With a 64-bit HOST_WIDE_INT, reject values outside
		 [-2^31, 2^31): biasing by 0x80000000 maps the valid
		 range onto [0, 2^32).  On a 32-bit host every
		 CONST_INT already fits, so no test is needed.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1380
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* On a 32-bit host only the upper bound can be exceeded.
		 The asymmetric limit 0x7fff8000 presumably reflects
		 the reach of a sign-extending addi after addis.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Accept values in [-0x80008000, 0x7fff8000): biasing
		 by 0x80008000 maps that range onto [0, 2^32).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1397
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
{
  /* Identical to reg_or_add_cint64_operand but applied to the negated
     value, since subtracting OP is adding -OP.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1414
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than HOST_WIDE_INT must be more than
	     32 bits; anything else indicates a bug somewhere.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends to set bits above the
	     low 32 in the wider mode, so it is never valid.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Valid iff no bits above the low 32 are set within the
	 mode's mask.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE is only expected here for DImode on
	 a host whose HOST_WIDE_INT cannot hold the whole value.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* The value fits in unsigned 32 bits iff the high word is 0.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1446
1447/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1448
1449int
1450got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1451{
1452  return (GET_CODE (op) == SYMBOL_REF
1453	  || GET_CODE (op) == CONST
1454	  || GET_CODE (op) == LABEL_REF);
1455}
1456
1457/* Return 1 if the operand is a simple references that can be loaded via
1458   the GOT (labels involving addition aren't allowed).  */
1459
1460int
1461got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1462{
1463  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1464}
1465
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

static int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* LOW is the low 32 bits sign-extended to 64; the xor/subtract
	 pair performs the sign extension.  HIGH keeps bit 31 so that
	 the test below recognizes values that are just sign-extended
	 32-bit quantities (loadable in 2 insns).  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      if (high == 0 || high == -1)
	return 2;

      /* Drop the duplicated bit 31 to get the true high part.  */
      high >>= 1;

      /* Form HIGH, then shift it into place (+1); if LOW is nonzero,
	 its insns are needed as well.  */
      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    /* Any remaining 32-bit value: addis then ori-style pair.  */
    return 2;
}
1502
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  Handles CONST_INT and
   CONST_DOUBLE (both integral and floating forms); aborts on
   anything else.  */
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value not representable as a sign-extended 32-bit quantity
	 may still be a recognized 64-bit mask, formable in 2 insns.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* A single-precision float occupies one 32-bit word; count the
	 insns needed to materialize its bit pattern.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* VOIDmode/DImode CONST_DOUBLEs carry the integer words
	 directly; otherwise convert the FP value to target words,
	 picking the word order from endianness.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit registers: each word is formed independently.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  Values that are sign-extended
	     32-bit quantities need only the low word formed.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  /* Otherwise form HIGH, shift into place (+1), and add the
	     insns for LOW if it is nonzero.  */
	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1575
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  /* For the FP modes, convert to target words and require each word
     to be loadable in a single instruction.  */
  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  /* Integral DImode: easy if the low word is zero on 64-bit, or if
     the whole value needs at most two insns.  */
  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1651
/* Returns the constant for the splat instruction, if it exists;
   returns 0 if CST cannot be splatted in MODE.  A wider-element
   splat whose halves agree is narrowed and retried at the next
   smaller element size.  */

static int
easy_vector_splat_const (int cst, enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
	break;
      cst = cst >> 16;
      /* Fall through: both 16-bit halves agree, so try as a
	 halfword splat.  */
    case V8HImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      if ((cst & 0xff) != ((cst >> 8) & 0xff))
	break;
      cst = cst >> 8;
      /* Fall through: both bytes agree, so try as a byte splat.  */
    case V16QImode:
	  if (EASY_VECTOR_15 (cst)
	      || EASY_VECTOR_15_ADD_SELF (cst))
	    return cst;
    default:
      break;
    }
  return 0;
}
1682
1683
1684/* Return nonzero if all elements of a vector have the same value.  */
1685
1686static int
1687easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1688{
1689  int units, i, cst;
1690
1691  units = CONST_VECTOR_NUNITS (op);
1692
1693  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1694  for (i = 1; i < units; ++i)
1695    if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1696      break;
1697  if (i == units && easy_vector_splat_const (cst, mode))
1698    return 1;
1699  return 0;
1700}
1701
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register without using memory.  */

int
easy_vector_constant (rtx op, enum machine_mode mode)
{
  int cst, cst2;

  /* Only constant vectors, and only when some vector unit exists.  */
  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zeros vector is always easy on the matching unit.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  cst  = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
       li r0, CONSTANT1
       evmergelo r0, r0, r0
       li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst  >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  /* AltiVec: a uniform vector whose splat value is within range of
     the vspltis* instructions (possibly via the add-self trick).  */
  if (TARGET_ALTIVEC
      && easy_vector_same (op, mode))
    {
      cst = easy_vector_splat_const (cst, mode);
      if (EASY_VECTOR_15_ADD_SELF (cst)
	  || EASY_VECTOR_15 (cst))
	return 1;
    }
  return 0;
}
1753
1754/* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF.  */
1755
1756int
1757easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1758{
1759  int cst;
1760  if (TARGET_ALTIVEC
1761      && GET_CODE (op) == CONST_VECTOR
1762      && easy_vector_same (op, mode))
1763    {
1764      cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1765      if (EASY_VECTOR_15_ADD_SELF (cst))
1766	return 1;
1767    }
1768  return 0;
1769}
1770
1771/* Generate easy_vector_constant out of a easy_vector_constant_add_self.  */
1772
1773rtx
1774gen_easy_vector_constant_add_self (rtx op)
1775{
1776  int i, units;
1777  rtvec v;
1778  units = GET_MODE_NUNITS (GET_MODE (op));
1779  v = rtvec_alloc (units);
1780
1781  for (i = 0; i < units; i++)
1782    RTVEC_ELT (v, i) =
1783      GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1784  return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1785}
1786
/* Return the assembler template for moving the constant vector
   OPERANDS[1] into register OPERANDS[0].  May rewrite OPERANDS[1]
   (and [2] for SPE) to the immediate(s) the template references.
   Returns "#" when the value needs the post-reload add-self split.  */
const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (easy_vector_constant (vec, mode))
	{
	  operands[1] = GEN_INT (cst);
	  /* Each case narrows CST as in easy_vector_splat_const and
	     falls through to the next smaller element size when the
	     wider splat is out of range.  */
	  switch (mode)
	    {
	    case V4SImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisw %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      cst = cst >> 16;
	      /* Fall through.  */
	    case V8HImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltish %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      cst = cst >> 8;
	      /* Fall through.  */
	    case V16QImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisb %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	    default:
	      abort ();
	    }
	}
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
      */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      /* When both halves are the same, one li suffices.  */
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1862
1863/* Return 1 if the operand is the constant 0.  This works for scalars
1864   as well as vectors.  */
1865int
1866zero_constant (rtx op, enum machine_mode mode)
1867{
1868  return op == CONST0_RTX (mode);
1869}
1870
1871/* Return 1 if the operand is 0.0.  */
1872int
1873zero_fp_constant (rtx op, enum machine_mode mode)
1874{
1875  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1876}
1877
1878/* Return 1 if the operand is in volatile memory.  Note that during
1879   the RTL generation phase, memory_operand does not return TRUE for
1880   volatile memory references.  So this function allows us to
1881   recognize volatile references where its safe.  */
1882
1883int
1884volatile_mem_operand (rtx op, enum machine_mode mode)
1885{
1886  if (GET_CODE (op) != MEM)
1887    return 0;
1888
1889  if (!MEM_VOLATILE_P (op))
1890    return 0;
1891
1892  if (mode != GET_MODE (op))
1893    return 0;
1894
1895  if (reload_completed)
1896    return memory_operand (op, mode);
1897
1898  if (reload_in_progress)
1899    return strict_memory_address_p (mode, XEXP (op, 0));
1900
1901  return memory_address_p (mode, XEXP (op, 0));
1902}
1903
1904/* Return 1 if the operand is an offsettable memory operand.  */
1905
1906int
1907offsettable_mem_operand (rtx op, enum machine_mode mode)
1908{
1909  return ((GET_CODE (op) == MEM)
1910	  && offsettable_address_p (reload_completed || reload_in_progress,
1911				    mode, XEXP (op, 0)));
1912}
1913
1914/* Return 1 if the operand is either an easy FP constant (see above) or
1915   memory.  */
1916
1917int
1918mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1919{
1920  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1921}
1922
1923/* Return 1 if the operand is either a non-special register or an item
1924   that can be used as the operand of a `mode' add insn.  */
1925
1926int
1927add_operand (rtx op, enum machine_mode mode)
1928{
1929  if (GET_CODE (op) == CONST_INT)
1930    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1931	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1932
1933  return gpc_reg_operand (op, mode);
1934}
1935
1936/* Return 1 if OP is a constant but not a valid add_operand.  */
1937
1938int
1939non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1940{
1941  return (GET_CODE (op) == CONST_INT
1942	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1943	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1944}
1945
1946/* Return 1 if the operand is a non-special register or a constant that
1947   can be used as the operand of an OR or XOR insn on the RS/6000.  */
1948
int
logical_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative OPL for a wider mode implies set
	 high bits we cannot see; reject the constant.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* Any set high-word bit puts the constant out of immediate
	 range.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* The constant must fit entirely in either the low or the high 16
     bits (the unshifted or shifted logical-immediate field).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1982
1983/* Return 1 if C is a constant that is not a logical operand (as
1984   above), but could be split into one.  */
1985
1986int
1987non_logical_cint_operand (rtx op, enum machine_mode mode)
1988{
1989  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1990	  && ! logical_operand (op, mode)
1991	  && reg_or_logical_cint_operand (op, mode));
1992}
1993
1994/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1995   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
1996   Reject all ones and all zeros, since these should have been optimized
1997   away and confuse the making of MB and ME.  */
1998
int
mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  (C & -C isolates the least
     significant set bit.)  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
2038
2039/* Return 1 for the PowerPC64 rlwinm corner case.  */
2040
int
mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrap-around masks (both MSB and LSB of the low word set)
     belong here; mask_operand rejects exactly these for POWERPC64.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Same transition-counting scheme as mask_operand, applied to the
     complement.  Reject the all-ones constant first.  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition (C & -C isolates the lowest set bit).  */
  lsb = c & -c;
  /* Invert and erase it to look for a second transition.  */
  c = ~c;
  c &= -lsb;
  /* Match only if no further transition follows, i.e. every bit above
     the second transition is set.  */
  lsb = c & -c;
  return c == -lsb;
}
2064
2065/* Return 1 if the operand is a constant that is a PowerPC64 mask.
2066   It is if there are no more than one 1->0 or 0->1 transitions.
2067   Reject all zeros, since zero should have been optimized away and
2068   confuses the making of MB and ME.  */
2069
2070int
2071mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2072{
2073  if (GET_CODE (op) == CONST_INT)
2074    {
2075      HOST_WIDE_INT c, lsb;
2076
2077      c = INTVAL (op);
2078
2079      /* Reject all zeros.  */
2080      if (c == 0)
2081	return 0;
2082
2083      /* We don't change the number of transitions by inverting,
2084	 so make sure we start with the LS bit zero.  */
2085      if (c & 1)
2086	c = ~c;
2087
2088      /* Find the transition, and check that all bits above are 1's.  */
2089      lsb = c & -c;
2090
2091      /* Match if all the bits above are 1's (or c is zero).  */
2092      return c == -lsb;
2093    }
2094  return 0;
2095}
2096
2097/* Like mask64_operand, but allow up to three transitions.  This
2098   predicate is used by insn patterns that generate two rldicl or
2099   rldicr machine insns.  */
2100
2101int
2102mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2103{
2104  if (GET_CODE (op) == CONST_INT)
2105    {
2106      HOST_WIDE_INT c, lsb;
2107
2108      c = INTVAL (op);
2109
2110      /* Disallow all zeros.  */
2111      if (c == 0)
2112	return 0;
2113
2114      /* We don't change the number of transitions by inverting,
2115	 so make sure we start with the LS bit zero.  */
2116      if (c & 1)
2117	c = ~c;
2118
2119      /* Find the first transition.  */
2120      lsb = c & -c;
2121
2122      /* Invert to look for a second transition.  */
2123      c = ~c;
2124
2125      /* Erase first transition.  */
2126      c &= -lsb;
2127
2128      /* Find the second transition.  */
2129      lsb = c & -c;
2130
2131      /* Invert to look for a third transition.  */
2132      c = ~c;
2133
2134      /* Erase second transition.  */
2135      c &= -lsb;
2136
2137      /* Find the third transition (if any).  */
2138      lsb = c & -c;
2139
2140      /* Match if all the bits above are 1's (or c is zero).  */
2141      return c == -lsb;
2142    }
2143  return 0;
2144}
2145
2146/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2147   implement ANDing by the mask IN.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);	/* First shift amount.  */
  out[1] = GEN_INT (m1);		/* First mask.  */
  out[2] = GEN_INT (shift);		/* Second shift amount.  */
  out[3] = GEN_INT (m2);		/* Second mask.  */
#else
  /* Only reachable for TARGET_POWERPC64, which requires a 64-bit
     HOST_WIDE_INT to hold the masks.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
2215
2216/* Return 1 if the operand is either a non-special register or a constant
2217   that can be used as the operand of a PowerPC64 logical AND insn.  */
2218
2219int
2220and64_operand (rtx op, enum machine_mode mode)
2221{
2222  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2223    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2224
2225  return (logical_operand (op, mode) || mask64_operand (op, mode));
2226}
2227
2228/* Like the above, but also match constants that can be implemented
2229   with two rldicl or rldicr insns.  */
2230
2231int
2232and64_2_operand (rtx op, enum machine_mode mode)
2233{
2234  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2235    return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2236
2237  return logical_operand (op, mode) || mask64_2_operand (op, mode);
2238}
2239
2240/* Return 1 if the operand is either a non-special register or a
2241   constant that can be used as the operand of an RS/6000 logical AND insn.  */
2242
2243int
2244and_operand (rtx op, enum machine_mode mode)
2245{
2246  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2247    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2248
2249  return (logical_operand (op, mode) || mask_operand (op, mode));
2250}
2251
2252/* Return 1 if the operand is a general register or memory operand.  */
2253
2254int
2255reg_or_mem_operand (rtx op, enum machine_mode mode)
2256{
2257  return (gpc_reg_operand (op, mode)
2258	  || memory_operand (op, mode)
2259	  || macho_lo_sum_memory_operand (op, mode)
2260	  || volatile_mem_operand (op, mode));
2261}
2262
2263/* Return 1 if the operand is a general register or memory operand without
2264   pre_inc or pre_dec which produces invalid form of PowerPC lwa
2265   instruction.  */
2266
2267int
2268lwa_operand (rtx op, enum machine_mode mode)
2269{
2270  rtx inner = op;
2271
2272  if (reload_completed && GET_CODE (inner) == SUBREG)
2273    inner = SUBREG_REG (inner);
2274
2275  return gpc_reg_operand (inner, mode)
2276    || (memory_operand (inner, mode)
2277	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
2278	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
2279	&& (GET_CODE (XEXP (inner, 0)) != PLUS
2280	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2281	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2282}
2283
2284/* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.  */
2285
2286int
2287symbol_ref_operand (rtx op, enum machine_mode mode)
2288{
2289  if (mode != VOIDmode && GET_MODE (op) != mode)
2290    return 0;
2291
2292  return (GET_CODE (op) == SYMBOL_REF
2293	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2294}
2295
2296/* Return 1 if the operand, used inside a MEM, is a valid first argument
2297   to CALL.  This is a SYMBOL_REF, a pseudo-register, LR or CTR.  */
2298
2299int
2300call_operand (rtx op, enum machine_mode mode)
2301{
2302  if (mode != VOIDmode && GET_MODE (op) != mode)
2303    return 0;
2304
2305  return (GET_CODE (op) == SYMBOL_REF
2306	  || (GET_CODE (op) == REG
2307	      && (REGNO (op) == LINK_REGISTER_REGNUM
2308		  || REGNO (op) == COUNT_REGISTER_REGNUM
2309		  || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2310}
2311
2312/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2313   this file.  */
2314
2315int
2316current_file_function_operand (rtx op,
2317                              enum machine_mode mode ATTRIBUTE_UNUSED)
2318{
2319  return (GET_CODE (op) == SYMBOL_REF
2320	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2321	  && (SYMBOL_REF_LOCAL_P (op)
2322	      || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2323}
2324
2325/* Return 1 if this operand is a valid input for a move insn.  */
2326
int
input_operand (rtx op, enum machine_mode mode)
{
  /* The checks below are ordered; each later test assumes the earlier
     ones have failed.  */

  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2384
2385
2386/* Darwin, AIX increases natural record alignment to doubleword if the first
2387   field is an FP double while the FP fields remain word aligned.  */
2388
unsigned int
rs6000_special_round_type_align (tree type, int computed, int specified)
{
  tree field = TYPE_FIELDS (type);

  /* Skip over any VAR_DECLs in the field list (e.g. static members)
     to reach the first real field.  */
  while (field != NULL && TREE_CODE (field) == VAR_DECL)
    field = TREE_CHAIN (field);

  /* Only records whose first field has DFmode get the doubleword
     alignment bump.  (NOTE(review): the FIELD == TYPE test looks
     vacuous -- a field should never equal its containing type --
     presumably defensive; confirm before touching.)  */
  if (field == NULL || field == type || DECL_MODE (field) != DFmode)
    return MAX (computed, specified);

  return MAX (MAX (computed, specified), 64);
}
2404
2405/* Return 1 for an operand in small memory on V.4/eabi.  */
2406
int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  /* OP is either a bare SYMBOL_REF or (const (plus SYMBOL_REF
     const_int)); anything else is rejected.  */
  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  /* Small data sections exist only for ELF targets.  */
  return 0;
#endif
}
2448
2449/* Return true, if operand is a memory operand and has a
2450   displacement divisible by 4.  */
2451
2452int
2453word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2454{
2455  rtx addr;
2456  int off = 0;
2457
2458  if (!memory_operand (op, mode))
2459    return 0;
2460
2461  addr = XEXP (op, 0);
2462  if (GET_CODE (addr) == PLUS
2463      && GET_CODE (XEXP (addr, 0)) == REG
2464      && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2465    off = INTVAL (XEXP (addr, 1));
2466
2467  return (off % 4) == 0;
2468}
2469
2470/* Return true if operand is a (MEM (PLUS (REG) (offset))) where offset
2471   is not divisible by four.  */
2472
2473int
2474invalid_gpr_mem (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2475{
2476  rtx addr;
2477  long off;
2478
2479  if (GET_CODE (op) != MEM)
2480    return 0;
2481
2482  addr = XEXP (op, 0);
2483  if (GET_CODE (addr) != PLUS
2484      || GET_CODE (XEXP (addr, 0)) != REG
2485      || GET_CODE (XEXP (addr, 1)) != CONST_INT)
2486    return 0;
2487
2488  off = INTVAL (XEXP (addr, 1));
2489  return (off & 3) != 0;
2490}
2491
2492/* Return true if operand is a hard register that can be used as a base
2493   register.  */
2494
2495int
2496base_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2497{
2498  unsigned int regno;
2499
2500  if (!REG_P (op))
2501    return 0;
2502
2503  regno = REGNO (op);
2504  return regno != 0 && regno <= 31;
2505}
2506
2507/* Return true if either operand is a general purpose register.  */
2508
2509bool
2510gpr_or_gpr_p (rtx op0, rtx op1)
2511{
2512  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2513	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2514}
2515
2516
2517/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
2518
2519static int
2520constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2521{
2522  switch (GET_CODE(op))
2523    {
2524    case SYMBOL_REF:
2525      if (RS6000_SYMBOL_REF_TLS_P (op))
2526	return 0;
2527      else if (CONSTANT_POOL_ADDRESS_P (op))
2528	{
2529	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2530	    {
2531	      *have_sym = 1;
2532	      return 1;
2533	    }
2534	  else
2535	    return 0;
2536	}
2537      else if (! strcmp (XSTR (op, 0), toc_label_name))
2538	{
2539	  *have_toc = 1;
2540	  return 1;
2541	}
2542      else
2543	return 0;
2544    case PLUS:
2545    case MINUS:
2546      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2547	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2548    case CONST:
2549      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2550    case CONST_INT:
2551      return 1;
2552    default:
2553      return 0;
2554    }
2555}
2556
2557static bool
2558constant_pool_expr_p (rtx op)
2559{
2560  int have_sym = 0;
2561  int have_toc = 0;
2562  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2563}
2564
2565static bool
2566toc_relative_expr_p (rtx op)
2567{
2568  int have_sym = 0;
2569  int have_toc = 0;
2570  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2571}
2572
2573/* SPE offset addressing is limited to 5-bits worth of double words.  */
2574#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2575
2576bool
2577legitimate_constant_pool_address_p (rtx x)
2578{
2579  return (TARGET_TOC
2580	  && GET_CODE (x) == PLUS
2581	  && GET_CODE (XEXP (x, 0)) == REG
2582	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2583	  && constant_pool_expr_p (XEXP (x, 1)));
2584}
2585
2586static bool
2587legitimate_small_data_p (enum machine_mode mode, rtx x)
2588{
2589  return (DEFAULT_ABI == ABI_V4
2590	  && !flag_pic && !TARGET_TOC
2591	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2592	  && small_data_operand (x, mode));
2593}
2594
static bool
legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  /* EXTRA is how far past OFFSET the access may additionally reach
     for multi-word loads/stores.  */
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* Both DFmode and DImode may end up in gprs.  If gprs are 32-bit,
	 then we need to load/store at both offset and offset+4.  */
      if (!TARGET_POWERPC64)
	extra = 4;
      break;

    case TFmode:
    case TImode:
      if (!TARGET_POWERPC64)
	extra = 12;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so the signed 16-bit displacement range maps onto
     [0, 0xffff]; both ends of the access must stay in range.  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2652
2653static bool
2654legitimate_indexed_address_p (rtx x, int strict)
2655{
2656  rtx op0, op1;
2657
2658  if (GET_CODE (x) != PLUS)
2659    return false;
2660  op0 = XEXP (x, 0);
2661  op1 = XEXP (x, 1);
2662
2663  if (!REG_P (op0) || !REG_P (op1))
2664    return false;
2665
2666  return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2667	   && INT_REG_OK_FOR_INDEX_P (op1, strict))
2668	  || (INT_REG_OK_FOR_BASE_P (op1, strict)
2669	      && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2670}
2671
2672static inline bool
2673legitimate_indirect_address_p (rtx x, int strict)
2674{
2675  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2676}
2677
2678static bool
2679macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2680{
2681    if (!TARGET_MACHO || !flag_pic
2682        || mode != SImode || GET_CODE(x) != MEM)
2683      return false;
2684    x = XEXP (x, 0);
2685
2686  if (GET_CODE (x) != LO_SUM)
2687    return false;
2688  if (GET_CODE (XEXP (x, 0)) != REG)
2689    return false;
2690  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2691    return false;
2692  x = XEXP (x, 1);
2693
2694  return CONSTANT_P (x);
2695}
2696
2697static bool
2698legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2699{
2700  if (GET_CODE (x) != LO_SUM)
2701    return false;
2702  if (GET_CODE (XEXP (x, 0)) != REG)
2703    return false;
2704  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2705    return false;
2706  x = XEXP (x, 1);
2707
2708  if (TARGET_ELF || TARGET_MACHO)
2709    {
2710      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2711	return false;
2712      if (TARGET_TOC)
2713	return false;
2714      if (GET_MODE_NUNITS (mode) != 1)
2715	return false;
2716      if (GET_MODE_BITSIZE (mode) > 32
2717	  && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2718	return false;
2719
2720      return CONSTANT_P (x);
2721    }
2722
2723  return false;
2724}
2725
2726
2727/* Try machine-dependent ways of modifying an illegitimate address
2728   to be legitimate.  If we find one, return the new, valid address.
2729   This is used from only one place: `memory_address' in explow.c.
2730
2731   OLDX is the address as it was before break_out_memory_refs was
2732   called.  In some cases it is useful to look at this to decide what
2733   needs to be done.
2734
2735   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2736
2737   It is always safe for this function to do nothing.  It exists to
2738   recognize opportunities to optimize the output.
2739
2740   On RS/6000, first check for the sum of a register with a constant
2741   integer that is out of range.  If so, generate code to add the
2742   constant with the low-order 16 bits masked to the register and force
2743   this result into another register (this can be done with `cau').
2744   Then generate an address of REG+(CONST&0xffff), allowing for the
2745   possibility of bit 16 being a one.
2746
2747   Then check for the sum of a register and something not constant, try to
2748   load the other things into a register and return the sum.  */
2749
rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  /* Thread-local symbols have their own legitimization path.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  /* REG + constant that does not fit in a signed 16-bit displacement:
     materialize the high part into a register and keep the sign-adjusted
     low part as the displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits of the constant.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* REG + non-constant: force the addend into a register so the sum is
     a legitimate reg+reg address, for modes that support it.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes allow only reg+reg or (reg) addressing.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
      {
        rtx op1 = XEXP (x, 0);
        rtx op2 = XEXP (x, 1);

        op1 = force_reg (Pmode, op1);

        /* Constant offsets must fit the SPE 5-bit doubleword field.  */
        if (GET_CODE (op2) != REG
            && (GET_CODE (op2) != CONST_INT
                || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
          op2 = force_reg (Pmode, op2);

        return gen_rtx_PLUS (Pmode, op1, op2);
      }

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: address constants via elf_high/LO_SUM.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Likewise for Darwin without a TOC, via macho_high/LO_SUM.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants eligible for the TOC get a TOC reference.  */
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2860
2861/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2862   We need to emit DTP-relative relocations.  */
2863
2864void
2865rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2866{
2867  switch (size)
2868    {
2869    case 4:
2870      fputs ("\t.long\t", file);
2871      break;
2872    case 8:
2873      fputs (DOUBLE_INT_ASM_OP, file);
2874      break;
2875    default:
2876      abort ();
2877    }
2878  output_addr_const (file, x);
2879  fputs ("@dtprel+0x8000", file);
2880}
2881
2882/* Construct the SYMBOL_REF for the tls_get_addr function.  */
2883
2884static GTY(()) rtx rs6000_tls_symbol;
2885static rtx
2886rs6000_tls_get_addr (void)
2887{
2888  if (!rs6000_tls_symbol)
2889    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2890
2891  return rs6000_tls_symbol;
2892}
2893
2894/* Construct the SYMBOL_REF for TLS GOT references.  */
2895
2896static GTY(()) rtx rs6000_got_symbol;
2897static rtx
2898rs6000_got_sym (void)
2899{
2900  if (!rs6000_got_symbol)
2901    {
2902      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2903      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2904      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2905    }
2906
2907  return rs6000_got_symbol;
2908}
2909
2910/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
2911   this (thread-local) address.  */
2912
static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  /* All paths deliver the address in a fresh pseudo.  */
  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* Local-exec, 16-bit offset: a single insn relative to the TLS
	 register (GPR 13 in 64-bit mode, GPR 2 in 32-bit mode).  */
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* Local-exec, 32-bit offset: high-adjusted part into TMP, then
	 the low part combined into DEST — two insns.  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      /* All remaining models need a pointer to the GOT/TOC.  */
      rtx r3, got, tga, tmp1, tmp2, eqv;

      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      /* No dedicated GOT register available; materialize the
		 address of _GLOBAL_OFFSET_TABLE_ in a pseudo.  */
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* PIC: compute the GOT address PC-relatively.  The
		     sequence is bracketed with REG_LIBCALL/REG_RETVAL
		     notes (and a REG_EQUAL note giving GSYM as the
		     value) so later passes can treat it as a unit.  */
		  char buf[30];
		  static int tls_got_labelno = 0;
		  rtx tempLR, lab, tmp3, mem;
		  rtx first, last;

		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
							     gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* Global-dynamic: build the argument in r3, then call
	     __tls_get_addr.  The whole sequence is wrapped via
	     emit_libcall_block so DEST is equated with ADDR.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* Local-dynamic: one __tls_get_addr call yields the module
	     base (TMP1, equated with the UNSPEC_TLSLD eqv), then the
	     DTP-relative offset of ADDR is added according to
	     rs6000_tls_size.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      /* 16-bit dtprel offset: single add.  */
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      /* 32-bit dtprel offset: high-adjusted part then low part.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Full-width dtprel offset: load it from the GOT and add.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  /* Load the TP-relative offset from the GOT, then add the
	     TLS register to form the final address.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
3095
3096/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
3097   instruction definitions.  */
3098
3099int
3100rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3101{
3102  return RS6000_SYMBOL_REF_TLS_P (x);
3103}
3104
3105/* Return 1 if X contains a thread-local symbol.  */
3106
3107bool
3108rs6000_tls_referenced_p (rtx x)
3109{
3110  if (! TARGET_HAVE_TLS)
3111    return false;
3112
3113  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3114}
3115
3116/* Return 1 if *X is a thread-local symbol.  This is the same as
3117   rs6000_tls_symbol_ref except for the type of the unused argument.  */
3118
3119static inline int
3120rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3121{
3122  return RS6000_SYMBOL_REF_TLS_P (*x);
3123}
3124
/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  Fallback for targets that provide no mode-aware
   base-register check: ignore MODE and use the plain predicate.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
3131
3132/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
3133   replace the input X, or the original X if no replacement is called for.
3134   The output parameter *WIN is 1 if the calling macro should goto WIN,
3135   0 if it should not.
3136
3137   For RS/6000, we wish to handle large displacements off a base
3138   register by splitting the addend across an addiu/addis and the mem insn.
3139   This cuts number of extra insns needed from 3 to 1.
3140
3141   On Darwin, we use this to generate code for floating point constants.
3142   A movsf_low is generated so we wind up with 2 instructions rather than 3.
3143   The Darwin code is inside #if TARGET_MACHO because only then is
3144   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
	int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.
     The (plus (plus reg const) const) shape below is exactly what the
     high/low split further down produces.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Large displacement off a base register: split VAL so that LOW is
     the sign-extended bottom 16 bits (usable directly in the mem) and
     HIGH is the rest, reloaded into the base.  Not done for SPE or
     AltiVec vector modes.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Sign-extend the low 16 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      /* Remainder, sign-extended from 32 bits.  */
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  /* Darwin: rewrite a bare SYMBOL_REF into a HIGH/LO_SUM pair (PIC:
     relative to the picbase symbol) and reload the high part.  */
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
      /* Don't do this for TFmode, since the result isn't offsettable.  */
      && mode != TFmode)
    {
      if (flag_pic)
	{
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
			   gen_rtx_SYMBOL_REF (Pmode,
			     machopic_function_base_name ())));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
	x = gen_rtx_LO_SUM (GET_MODE (x),
              gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constants that live in the TOC become TOC-relative references.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* No transformation applies; let the caller handle X normally.  */
  *win = 0;
  return x;
}
3258
3259/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3260   that is a valid memory address for an instruction.
3261   The MODE argument is the machine mode for the MEM expression
3262   that wants to use this address.
3263
3264   On the RS/6000, there are four valid address: a SYMBOL_REF that
3265   refers to a constant pool entry of an address (or the sum of it
3266   plus a constant), a short (16-bit signed) constant plus a register,
3267   the sum of two registers, or a register indirect, possibly with an
3268   auto-increment.  For DFmode and DImode with a constant plus register,
3269   we must ensure that both words are addressable or PowerPC64 with offset
3270   word aligned.
3271
3272   For modes spanning multiple registers (DFmode in 32-bit GPRs,
3273   32-bit DImode, TImode), indexed addressing cannot be used because
3274   adjacent memory cells are accessed by adding word-sized offsets
3275   during assembly output.  */
3276int
3277rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3278{
3279  if (RS6000_SYMBOL_REF_TLS_P (x))
3280    return 0;
3281  if (legitimate_indirect_address_p (x, reg_ok_strict))
3282    return 1;
3283  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3284      && !ALTIVEC_VECTOR_MODE (mode)
3285      && !SPE_VECTOR_MODE (mode)
3286      && TARGET_UPDATE
3287      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3288    return 1;
3289  if (legitimate_small_data_p (mode, x))
3290    return 1;
3291  if (legitimate_constant_pool_address_p (x))
3292    return 1;
3293  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
3294  if (! reg_ok_strict
3295      && GET_CODE (x) == PLUS
3296      && GET_CODE (XEXP (x, 0)) == REG
3297      && (XEXP (x, 0) == virtual_stack_vars_rtx
3298	  || XEXP (x, 0) == arg_pointer_rtx)
3299      && GET_CODE (XEXP (x, 1)) == CONST_INT)
3300    return 1;
3301  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3302    return 1;
3303  if (mode != TImode
3304      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3305	  || TARGET_POWERPC64
3306	  || (mode != DFmode && mode != TFmode))
3307      && (TARGET_POWERPC64 || mode != DImode)
3308      && legitimate_indexed_address_p (x, reg_ok_strict))
3309    return 1;
3310  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3311    return 1;
3312  return 0;
3313}
3314
3315/* Go to LABEL if ADDR (a legitimate address expression)
3316   has an effect that depends on the machine mode it is used for.
3317
3318   On the RS/6000 this is true of all integral offsets (since AltiVec
3319   modes don't allow them) or is a pre-increment or decrement.
3320
3321   ??? Except that due to conceptual problems in offsettable_address_p
3322   we can't really report the problems of integral offsets.  So leave
3323   this assuming that the adjustable offset must be valid for the
3324   sub-words of a TFmode operand, which is what we had before.  */
3325
3326bool
3327rs6000_mode_dependent_address (rtx addr)
3328{
3329  switch (GET_CODE (addr))
3330    {
3331    case PLUS:
3332      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3333	{
3334	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3335	  return val + 12 + 0x8000 >= 0x10000;
3336	}
3337      break;
3338
3339    case LO_SUM:
3340      return true;
3341
3342    case PRE_INC:
3343    case PRE_DEC:
3344      return TARGET_UPDATE;
3345
3346    default:
3347      break;
3348    }
3349
3350  return false;
3351}
3352
3353/* Try to output insns to set TARGET equal to the constant C if it can
3354   be done in less than N insns.  Do all computations in MODE.
3355   Returns the place where the output has been placed if it can be
3356   done and the insns have been emitted.  If it would take more than N
3357   insns, zero is returned and no insns and emitted.  */
3358
rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
		       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Sub-word constants always fit in one move.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Two insns: set the high 16 bits, then IOR in the low 16.
	 Use DEST itself as the intermediate when no new pseudos may
	 be created.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low (c0) and high (c1) halves and hand off
	 to the full 64-bit synthesis routine.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high half.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* The low word already holds all 64 bits; derive the sign.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note recording the constant value, unless the
     last emitted insn's source is already the constant itself.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3417
3418/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3419   fall back to a straight forward decomposition.  We do this to avoid
3420   exponential run times encountered when looking for longer sequences
3421   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: DEST spans two words; move each half directly.
	 C1 is the low word, C2 the high word.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: break the constant into four 16-bit chunks
	 (ud1 lowest ... ud4 highest) and synthesize it with at most
	 five insns: set-high, IOR, shift, IOR, IOR.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* With a 64-bit host word, the high half lives in C1 too.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  /* Value is a sign-extended 16-bit constant: one move.  */
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  /* Sign-extended 32-bit constant: set the (sign-adjusted)
	     upper 16 bits, then IOR in the low 16 if nonzero.  */
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  /* 48 significant bits: build ud3:ud2 as a 32-bit value,
	     shift left 16, then IOR in ud1.  */
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: build ud4:ud3 as a 32-bit value, shift left
	     32, then IOR in ud2<<16 and ud1 as needed.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
3504
3505/* Emit a move from SOURCE to DEST in mode MODE.  */
3506void
3507rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3508{
3509  rtx operands[2];
3510  operands[0] = dest;
3511  operands[1] = source;
3512
3513  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
3514  if (GET_CODE (operands[1]) == CONST_DOUBLE
3515      && ! FLOAT_MODE_P (mode)
3516      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3517    {
3518      /* FIXME.  This should never happen.  */
3519      /* Since it seems that it does, do the safe thing and convert
3520	 to a CONST_INT.  */
3521      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3522    }
3523  if (GET_CODE (operands[1]) == CONST_DOUBLE
3524      && ! FLOAT_MODE_P (mode)
3525      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3526	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
3527	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
3528	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
3529    abort ();
3530
3531  /* Check if GCC is setting up a block move that will end up using FP
3532     registers as temporaries.  We must make sure this is acceptable.  */
3533  if (GET_CODE (operands[0]) == MEM
3534      && GET_CODE (operands[1]) == MEM
3535      && mode == DImode
3536      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3537	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3538      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3539					    ? 32 : MEM_ALIGN (operands[0])))
3540	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3541					       ? 32
3542					       : MEM_ALIGN (operands[1]))))
3543      && ! MEM_VOLATILE_P (operands [0])
3544      && ! MEM_VOLATILE_P (operands [1]))
3545    {
3546      emit_move_insn (adjust_address (operands[0], SImode, 0),
3547		      adjust_address (operands[1], SImode, 0));
3548      emit_move_insn (adjust_address (operands[0], SImode, 4),
3549		      adjust_address (operands[1], SImode, 4));
3550      return;
3551    }
3552
3553  if (!no_new_pseudos)
3554    {
3555      if (GET_CODE (operands[1]) == MEM && optimize > 0
3556	  && (mode == QImode || mode == HImode || mode == SImode)
3557	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3558	{
3559	  rtx reg = gen_reg_rtx (word_mode);
3560
3561	  emit_insn (gen_rtx_SET (word_mode, reg,
3562				  gen_rtx_ZERO_EXTEND (word_mode,
3563						       operands[1])));
3564	  operands[1] = gen_lowpart (mode, reg);
3565	}
3566      if (GET_CODE (operands[0]) != REG)
3567	operands[1] = force_reg (mode, operands[1]);
3568    }
3569
3570  if (mode == SFmode && ! TARGET_POWERPC
3571      && TARGET_HARD_FLOAT && TARGET_FPRS
3572      && GET_CODE (operands[0]) == MEM)
3573    {
3574      int regnum;
3575
3576      if (reload_in_progress || reload_completed)
3577	regnum = true_regnum (operands[1]);
3578      else if (GET_CODE (operands[1]) == REG)
3579	regnum = REGNO (operands[1]);
3580      else
3581	regnum = -1;
3582
3583      /* If operands[1] is a register, on POWER it may have
3584	 double-precision data in it, so truncate it to single
3585	 precision.  */
3586      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3587	{
3588	  rtx newreg;
3589	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3590	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3591	  operands[1] = newreg;
3592	}
3593    }
3594
3595  /* Recognize the case where operand[1] is a reference to thread-local
3596     data and load its address to a register.  */
3597  if (GET_CODE (operands[1]) == SYMBOL_REF)
3598    {
3599      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3600      if (model != 0)
3601	operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3602    }
3603
3604  /* Handle the case where reload calls us with an invalid address.  */
3605  if (reload_in_progress && mode == Pmode
3606      && (! general_operand (operands[1], mode)
3607	  || ! nonimmediate_operand (operands[0], mode)))
3608    goto emit_set;
3609
3610  /* Handle the case of CONSTANT_P_RTX.  */
3611  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3612    goto emit_set;
3613
3614  /* 128-bit constant floating-point values on Darwin should really be
3615     loaded as two parts.  */
3616  if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3617      && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3618      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3619    {
3620      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3621	 know how to get a DFmode SUBREG of a TFmode.  */
3622      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3623			simplify_gen_subreg (DImode, operands[1], mode, 0),
3624			DImode);
3625      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3626					     GET_MODE_SIZE (DImode)),
3627			simplify_gen_subreg (DImode, operands[1], mode,
3628					     GET_MODE_SIZE (DImode)),
3629			DImode);
3630      return;
3631    }
3632
3633  /* FIXME:  In the long term, this switch statement should go away
3634     and be replaced by a sequence of tests based on things like
3635     mode == Pmode.  */
3636  switch (mode)
3637    {
3638    case HImode:
3639    case QImode:
3640      if (CONSTANT_P (operands[1])
3641	  && GET_CODE (operands[1]) != CONST_INT)
3642	operands[1] = force_const_mem (mode, operands[1]);
3643      break;
3644
3645    case TFmode:
3646    case DFmode:
3647    case SFmode:
3648      if (CONSTANT_P (operands[1])
3649	  && ! easy_fp_constant (operands[1], mode))
3650	operands[1] = force_const_mem (mode, operands[1]);
3651      break;
3652
3653    case V16QImode:
3654    case V8HImode:
3655    case V4SFmode:
3656    case V4SImode:
3657    case V4HImode:
3658    case V2SFmode:
3659    case V2SImode:
3660    case V1DImode:
3661      if (CONSTANT_P (operands[1])
3662	  && !easy_vector_constant (operands[1], mode))
3663	operands[1] = force_const_mem (mode, operands[1]);
3664      break;
3665
3666    case SImode:
3667    case DImode:
3668      /* Use default pattern for address of ELF small data */
3669      if (TARGET_ELF
3670	  && mode == Pmode
3671	  && DEFAULT_ABI == ABI_V4
3672	  && (GET_CODE (operands[1]) == SYMBOL_REF
3673	      || GET_CODE (operands[1]) == CONST)
3674	  && small_data_operand (operands[1], mode))
3675	{
3676	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3677	  return;
3678	}
3679
3680      if (DEFAULT_ABI == ABI_V4
3681	  && mode == Pmode && mode == SImode
3682	  && flag_pic == 1 && got_operand (operands[1], mode))
3683	{
3684	  emit_insn (gen_movsi_got (operands[0], operands[1]));
3685	  return;
3686	}
3687
3688      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3689	  && TARGET_NO_TOC
3690	  && ! flag_pic
3691	  && mode == Pmode
3692	  && CONSTANT_P (operands[1])
3693	  && GET_CODE (operands[1]) != HIGH
3694	  && GET_CODE (operands[1]) != CONST_INT)
3695	{
3696	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3697
3698	  /* If this is a function address on -mcall-aixdesc,
3699	     convert it to the address of the descriptor.  */
3700	  if (DEFAULT_ABI == ABI_AIX
3701	      && GET_CODE (operands[1]) == SYMBOL_REF
3702	      && XSTR (operands[1], 0)[0] == '.')
3703	    {
3704	      const char *name = XSTR (operands[1], 0);
3705	      rtx new_ref;
3706	      while (*name == '.')
3707		name++;
3708	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3709	      CONSTANT_POOL_ADDRESS_P (new_ref)
3710		= CONSTANT_POOL_ADDRESS_P (operands[1]);
3711	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3712	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3713	      SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3714	      operands[1] = new_ref;
3715	    }
3716
3717	  if (DEFAULT_ABI == ABI_DARWIN)
3718	    {
3719#if TARGET_MACHO
3720	      if (MACHO_DYNAMIC_NO_PIC_P)
3721		{
3722		  /* Take care of any required data indirection.  */
3723		  operands[1] = rs6000_machopic_legitimize_pic_address (
3724				  operands[1], mode, operands[0]);
3725		  if (operands[0] != operands[1])
3726		    emit_insn (gen_rtx_SET (VOIDmode,
3727				            operands[0], operands[1]));
3728		  return;
3729		}
3730#endif
3731	      emit_insn (gen_macho_high (target, operands[1]));
3732	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
3733	      return;
3734	    }
3735
3736	  emit_insn (gen_elf_high (target, operands[1]));
3737	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
3738	  return;
3739	}
3740
3741      /* If this is a SYMBOL_REF that refers to a constant pool entry,
3742	 and we have put it in the TOC, we just need to make a TOC-relative
3743	 reference to it.  */
3744      if (TARGET_TOC
3745	  && GET_CODE (operands[1]) == SYMBOL_REF
3746	  && constant_pool_expr_p (operands[1])
3747	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3748					      get_pool_mode (operands[1])))
3749	{
3750	  operands[1] = create_TOC_reference (operands[1]);
3751	}
3752      else if (mode == Pmode
3753	       && CONSTANT_P (operands[1])
3754	       && ((GET_CODE (operands[1]) != CONST_INT
3755		    && ! easy_fp_constant (operands[1], mode))
3756		   || (GET_CODE (operands[1]) == CONST_INT
3757		       && num_insns_constant (operands[1], mode) > 2)
3758		   || (GET_CODE (operands[0]) == REG
3759		       && FP_REGNO_P (REGNO (operands[0]))))
3760	       && GET_CODE (operands[1]) != HIGH
3761	       && ! legitimate_constant_pool_address_p (operands[1])
3762	       && ! toc_relative_expr_p (operands[1]))
3763	{
3764	  /* Emit a USE operation so that the constant isn't deleted if
3765	     expensive optimizations are turned on because nobody
3766	     references it.  This should only be done for operands that
3767	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3768	     This should not be done for operands that contain LABEL_REFs.
3769	     For now, we just handle the obvious case.  */
3770	  if (GET_CODE (operands[1]) != LABEL_REF)
3771	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3772
3773#if TARGET_MACHO
3774	  /* Darwin uses a special PIC legitimizer.  */
3775	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3776	    {
3777	      operands[1] =
3778		rs6000_machopic_legitimize_pic_address (operands[1], mode,
3779							operands[0]);
3780	      if (operands[0] != operands[1])
3781		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3782	      return;
3783	    }
3784#endif
3785
3786	  /* If we are to limit the number of things we put in the TOC and
3787	     this is a symbol plus a constant we can add in one insn,
3788	     just put the symbol in the TOC and add the constant.  Don't do
3789	     this if reload is in progress.  */
3790	  if (GET_CODE (operands[1]) == CONST
3791	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3792	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
3793	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3794	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3795		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3796	      && ! side_effects_p (operands[0]))
3797	    {
3798	      rtx sym =
3799		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3800	      rtx other = XEXP (XEXP (operands[1], 0), 1);
3801
3802	      sym = force_reg (mode, sym);
3803	      if (mode == SImode)
3804		emit_insn (gen_addsi3 (operands[0], sym, other));
3805	      else
3806		emit_insn (gen_adddi3 (operands[0], sym, other));
3807	      return;
3808	    }
3809
3810	  operands[1] = force_const_mem (mode, operands[1]);
3811
3812	  if (TARGET_TOC
3813	      && constant_pool_expr_p (XEXP (operands[1], 0))
3814	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3815			get_pool_constant (XEXP (operands[1], 0)),
3816			get_pool_mode (XEXP (operands[1], 0))))
3817	    {
3818	      operands[1]
3819		= gen_rtx_MEM (mode,
3820			       create_TOC_reference (XEXP (operands[1], 0)));
3821	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
3822	      RTX_UNCHANGING_P (operands[1]) = 1;
3823	    }
3824	}
3825      break;
3826
3827    case TImode:
3828      if (GET_CODE (operands[0]) == MEM
3829	  && GET_CODE (XEXP (operands[0], 0)) != REG
3830	  && ! reload_in_progress)
3831	operands[0]
3832	  = replace_equiv_address (operands[0],
3833				   copy_addr_to_reg (XEXP (operands[0], 0)));
3834
3835      if (GET_CODE (operands[1]) == MEM
3836	  && GET_CODE (XEXP (operands[1], 0)) != REG
3837	  && ! reload_in_progress)
3838	operands[1]
3839	  = replace_equiv_address (operands[1],
3840				   copy_addr_to_reg (XEXP (operands[1], 0)));
3841      if (TARGET_POWER)
3842	{
3843	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
3844		       gen_rtvec (2,
3845				  gen_rtx_SET (VOIDmode,
3846					       operands[0], operands[1]),
3847				  gen_rtx_CLOBBER (VOIDmode,
3848						   gen_rtx_SCRATCH (SImode)))));
3849	  return;
3850	}
3851      break;
3852
3853    default:
3854      abort ();
3855    }
3856
3857  /* Above, we may have called force_const_mem which may have returned
3858     an invalid address.  If we can, fix this up; otherwise, reload will
3859     have to deal with it.  */
3860  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3861    operands[1] = validize_mem (operands[1]);
3862
3863 emit_set:
3864  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3865}
3866
/* Nonzero if we can use a floating-point register to pass this arg:
   the mode is a scalar float mode, hardware FP (and FPRs) is enabled,
   and there is still a free FP argument register.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg: the mode
   is an AltiVec vector mode, the AltiVec ABI is in force, a vector
   argument register is free, and the argument is named (anonymous
   varargs vectors do not go in VRs).  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
3879
3880/* Return a nonzero value to say to return the function value in
3881   memory, just as large structures are always returned.  TYPE will be
3882   the data type of the value, and FNTYPE will be the type of the
3883   function doing the returning, or @code{NULL} for libcalls.
3884
3885   The AIX ABI for the RS/6000 specifies that all structures are
3886   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
3887   specifies that structures <= 8 bytes are returned in r3/r4, but a
3888   draft put them in memory, and GCC used to implement the draft
3889   instead of the final standard.  Therefore, TARGET_AIX_STRUCT_RET
3890   controls this instead of DEFAULT_ABI; V.4 targets needing backward
3891   compatibility can change DRAFT_V4_STRUCT_RET to override the
3892   default, and -m switches get the final word.  See
3893   rs6000_override_options for more details.
3894
3895   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3896   long double support is enabled.  These values are returned in memory.
3897
3898   int_size_in_bytes returns -1 for variable size objects, which go in
3899   memory always.  The cast to unsigned makes -1 > 8.  */
3900
3901static bool
3902rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3903{
3904  if (AGGREGATE_TYPE_P (type)
3905      && (TARGET_AIX_STRUCT_RET
3906	  || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3907    return true;
3908  if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3909    return true;
3910  return false;
3911}
3912
3913/* Initialize a variable CUM of type CUMULATIVE_ARGS
3914   for a call to a function whose data type is FNTYPE.
3915   For a library call, FNTYPE is 0.
3916
3917   For incoming args we set the number of arguments in the prototype large
3918   so we never return a PARALLEL.  */
3919
3920void
3921init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3922		      rtx libname ATTRIBUTE_UNUSED, int incoming,
3923		      int libcall, int n_named_args)
3924{
3925  static CUMULATIVE_ARGS zero_cumulative;
3926
3927  *cum = zero_cumulative;
3928  cum->words = 0;
3929  cum->fregno = FP_ARG_MIN_REG;
3930  cum->vregno = ALTIVEC_ARG_MIN_REG;
3931  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3932  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3933		      ? CALL_LIBCALL : CALL_NORMAL);
3934  cum->sysv_gregno = GP_ARG_MIN_REG;
3935  cum->stdarg = fntype
3936    && (TYPE_ARG_TYPES (fntype) != 0
3937	&& (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
3938	    != void_type_node));
3939
3940  cum->nargs_prototype = 0;
3941  if (incoming || cum->prototype)
3942    cum->nargs_prototype = n_named_args;
3943
3944  /* Check for a longcall attribute.  */
3945  if (fntype
3946      && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3947      && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3948    cum->call_cookie = CALL_LONG;
3949
3950  if (TARGET_DEBUG_ARG)
3951    {
3952      fprintf (stderr, "\ninit_cumulative_args:");
3953      if (fntype)
3954	{
3955	  tree ret_type = TREE_TYPE (fntype);
3956	  fprintf (stderr, " ret code = %s,",
3957		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
3958	}
3959
3960      if (cum->call_cookie & CALL_LONG)
3961	fprintf (stderr, " longcall,");
3962
3963      fprintf (stderr, " proto = %d, nargs = %d\n",
3964	       cum->prototype, cum->nargs_prototype);
3965    }
3966
3967    if (fntype
3968	&& !TARGET_ALTIVEC
3969	&& TARGET_ALTIVEC_ABI
3970        && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3971      {
3972	error ("Cannot return value in vector register because"
3973	       " altivec instructions are disabled, use -maltivec"
3974	       " to enable them.");
3975      }
3976}
3977
3978/* If defined, a C expression which determines whether, and in which
3979   direction, to pad out an argument with extra space.  The value
3980   should be of type `enum direction': either `upward' to pad above
3981   the argument, `downward' to pad below, or `none' to inhibit
3982   padding.
3983
3984   For the AIX ABI structs are always stored left shifted in their
3985   argument slot.  */
3986
3987enum direction
3988function_arg_padding (enum machine_mode mode, tree type)
3989{
3990#ifndef AGGREGATE_PADDING_FIXED
3991#define AGGREGATE_PADDING_FIXED 0
3992#endif
3993#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3994#define AGGREGATES_PAD_UPWARD_ALWAYS 0
3995#endif
3996
3997  if (!AGGREGATE_PADDING_FIXED)
3998    {
3999      /* GCC used to pass structures of the same size as integer types as
4000	 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4001	 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4002	 passed padded downward, except that -mstrict-align further
4003	 muddied the water in that multi-component structures of 2 and 4
4004	 bytes in size were passed padded upward.
4005
4006	 The following arranges for best compatibility with previous
4007	 versions of gcc, but removes the -mstrict-align dependency.  */
4008      if (BYTES_BIG_ENDIAN)
4009	{
4010	  HOST_WIDE_INT size = 0;
4011
4012	  if (mode == BLKmode)
4013	    {
4014	      if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4015		size = int_size_in_bytes (type);
4016	    }
4017	  else
4018	    size = GET_MODE_SIZE (mode);
4019
4020	  if (size == 1 || size == 2 || size == 4)
4021	    return downward;
4022	}
4023      return upward;
4024    }
4025
4026  if (AGGREGATES_PAD_UPWARD_ALWAYS)
4027    {
4028      if (type != 0 && AGGREGATE_TYPE_P (type))
4029	return upward;
4030    }
4031
4032  /* Fall back to the default.  */
4033  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4034}
4035
4036/* If defined, a C expression that gives the alignment boundary, in bits,
4037   of an argument with the specified mode and type.  If it is not defined,
4038   PARM_BOUNDARY is used for all arguments.
4039
4040   V.4 wants long longs to be double word aligned.  */
4041
4042int
4043function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4044{
4045  if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4046    return 64;
4047  else if (SPE_VECTOR_MODE (mode))
4048    return 64;
4049  else if (ALTIVEC_VECTOR_MODE (mode))
4050    return 128;
4051  else
4052    return PARM_BOUNDARY;
4053}
4054
4055/* Compute the size (in words) of a function argument.  */
4056
4057static unsigned long
4058rs6000_arg_size (enum machine_mode mode, tree type)
4059{
4060  unsigned long size;
4061
4062  if (mode != BLKmode)
4063    size = GET_MODE_SIZE (mode);
4064  else
4065    size = int_size_in_bytes (type);
4066
4067  if (TARGET_32BIT)
4068    return (size + 3) >> 2;
4069  else
4070    return (size + 7) >> 3;
4071}
4072
4073/* Update the data in CUM to advance over an argument
4074   of mode MODE and data type TYPE.
4075   (TYPE is null for libcalls where that information may not be available.)
4076
4077   Note that for args passed by reference, function_arg will be called
4078   with MODE and TYPE set to that of the pointer to the arg, not the arg
4079   itself.  */
4080
4081void
4082function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4083		      tree type, int named)
4084{
4085  cum->nargs_prototype--;
4086
4087  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4088    {
4089      bool stack = false;
4090
4091      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4092        {
4093	  cum->vregno++;
4094	  if (!TARGET_ALTIVEC)
4095	    error ("Cannot pass argument in vector register because"
4096		   " altivec instructions are disabled, use -maltivec"
4097		   " to enable them.");
4098
4099	  /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4100	     even if it is going to be passed in a vector register.
4101	     Darwin does the same for variable-argument functions.  */
4102	  if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4103	      || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4104	    stack = true;
4105	}
4106      else
4107	stack = true;
4108
4109      if (stack)
4110        {
4111	  int align;
4112
4113	  /* Vector parameters must be 16-byte aligned.  This places
4114	     them at 2 mod 4 in terms of words in 32-bit mode, since
4115	     the parameter save area starts at offset 24 from the
4116	     stack.  In 64-bit mode, they just have to start on an
4117	     even word, since the parameter save area is 16-byte
4118	     aligned.  Space for GPRs is reserved even if the argument
4119	     will be passed in memory.  */
4120	  if (TARGET_32BIT)
4121	    align = (2 - cum->words) & 3;
4122	  else
4123	    align = cum->words & 1;
4124	  cum->words += align + rs6000_arg_size (mode, type);
4125
4126	  if (TARGET_DEBUG_ARG)
4127	    {
4128	      fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4129		       cum->words, align);
4130	      fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4131		       cum->nargs_prototype, cum->prototype,
4132		       GET_MODE_NAME (mode));
4133	    }
4134	}
4135    }
4136  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4137	   && !cum->stdarg
4138	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
4139    cum->sysv_gregno++;
4140  else if (DEFAULT_ABI == ABI_V4)
4141    {
4142      if (TARGET_HARD_FLOAT && TARGET_FPRS
4143	  && (mode == SFmode || mode == DFmode))
4144	{
4145	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
4146	    cum->fregno++;
4147	  else
4148	    {
4149	      if (mode == DFmode)
4150	        cum->words += cum->words & 1;
4151	      cum->words += rs6000_arg_size (mode, type);
4152	    }
4153	}
4154      else
4155	{
4156	  int n_words = rs6000_arg_size (mode, type);
4157	  int gregno = cum->sysv_gregno;
4158
4159	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4160	     (r7,r8) or (r9,r10).  As does any other 2 word item such
4161	     as complex int due to a historical mistake.  */
4162	  if (n_words == 2)
4163	    gregno += (1 - gregno) & 1;
4164
4165	  /* Multi-reg args are not split between registers and stack.  */
4166	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4167	    {
4168	      /* Long long and SPE vectors are aligned on the stack.
4169		 So are other 2 word items such as complex int due to
4170		 a historical mistake.  */
4171	      if (n_words == 2)
4172		cum->words += cum->words & 1;
4173	      cum->words += n_words;
4174	    }
4175
4176	  /* Note: continuing to accumulate gregno past when we've started
4177	     spilling to the stack indicates the fact that we've started
4178	     spilling to the stack to expand_builtin_saveregs.  */
4179	  cum->sysv_gregno = gregno + n_words;
4180	}
4181
4182      if (TARGET_DEBUG_ARG)
4183	{
4184	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4185		   cum->words, cum->fregno);
4186	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4187		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4188	  fprintf (stderr, "mode = %4s, named = %d\n",
4189		   GET_MODE_NAME (mode), named);
4190	}
4191    }
4192  else
4193    {
4194      int n_words = rs6000_arg_size (mode, type);
4195      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4196
4197      /* The simple alignment calculation here works because
4198	 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4199	 If we ever want to handle alignments larger than 8 bytes for
4200	 32-bit or 16 bytes for 64-bit, then we'll need to take into
4201	 account the offset to the start of the parm save area.  */
4202      align &= cum->words;
4203      cum->words += align + n_words;
4204
4205      if (GET_MODE_CLASS (mode) == MODE_FLOAT
4206	  && TARGET_HARD_FLOAT && TARGET_FPRS)
4207	cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4208
4209      if (TARGET_DEBUG_ARG)
4210	{
4211	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4212		   cum->words, cum->fregno);
4213	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4214		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4215	  fprintf (stderr, "named = %d, align = %d\n", named, align);
4216	}
4217    }
4218}
4219
4220/* Determine where to put a SIMD argument on the SPE.  */
4221
4222static rtx
4223rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4224			 tree type)
4225{
4226  if (cum->stdarg)
4227    {
4228      int gregno = cum->sysv_gregno;
4229      int n_words = rs6000_arg_size (mode, type);
4230
4231      /* SPE vectors are put in odd registers.  */
4232      if (n_words == 2 && (gregno & 1) == 0)
4233	gregno += 1;
4234
4235      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4236	{
4237	  rtx r1, r2;
4238	  enum machine_mode m = SImode;
4239
4240	  r1 = gen_rtx_REG (m, gregno);
4241	  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4242	  r2 = gen_rtx_REG (m, gregno + 1);
4243	  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4244	  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4245	}
4246      else
4247	return NULL_RTX;
4248    }
4249  else
4250    {
4251      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4252	return gen_rtx_REG (mode, cum->sysv_gregno);
4253      else
4254	return NULL_RTX;
4255    }
4256}
4257
4258/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */
4259
/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */

static rtx
rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			   tree type, int align_words)
{
  if (mode == DFmode)
    {
      /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
	 in vararg list into zero, one or two GPRs */
      if (align_words >= GP_ARG_NUM_REG)
	/* No GPRs left: only the FPR copy; the NULL_RTX first element
	   marks the memory portion.  */
	return gen_rtx_PARALLEL (DFmode,
		 gen_rtvec (2,
			    gen_rtx_EXPR_LIST (VOIDmode,
					       NULL_RTX, const0_rtx),
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (mode,
							    cum->fregno),
					       const0_rtx)));
      else if (align_words + rs6000_arg_size (mode, type)
	       > GP_ARG_NUM_REG)
	/* If this is partially on the stack, then we only
	   include the portion actually in registers here.  */
	return gen_rtx_PARALLEL (DFmode,
		 gen_rtvec (2,
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (SImode,
							    GP_ARG_MIN_REG
							    + align_words),
					       const0_rtx),
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (mode,
							    cum->fregno),
					       const0_rtx)));

      /* split a DFmode arg into two GPRs */
      return gen_rtx_PARALLEL (DFmode,
	       gen_rtvec (3,
			  gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_REG (SImode,
							  GP_ARG_MIN_REG
							  + align_words),
					     const0_rtx),
			  gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_REG (SImode,
							  GP_ARG_MIN_REG
							  + align_words + 1),
					     GEN_INT (4)),
			  gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_REG (mode, cum->fregno),
					     const0_rtx)));
    }
  /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
     or two GPRs */
  else if (mode == DImode)
    {
      if (align_words < GP_ARG_NUM_REG - 1)
	/* Both halves fit: two SImode GPR pieces at offsets 0 and 4.  */
	return gen_rtx_PARALLEL (DImode,
		 gen_rtvec (2,
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (SImode,
							    GP_ARG_MIN_REG
							    + align_words),
					       const0_rtx),
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (SImode,
							    GP_ARG_MIN_REG
							    + align_words + 1),
					       GEN_INT (4))));
      else if (align_words == GP_ARG_NUM_REG - 1)
	  /* Only the last GPR is free: low half in memory (NULL_RTX),
	     high half in the final register.  */
	  return gen_rtx_PARALLEL (DImode,
		   gen_rtvec (2,
			      gen_rtx_EXPR_LIST (VOIDmode,
						 NULL_RTX, const0_rtx),
			      gen_rtx_EXPR_LIST (VOIDmode,
						 gen_rtx_REG (SImode,
							      GP_ARG_MIN_REG
							      + align_words),
						 const0_rtx)));
    }
  else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
    {
      /* BLKmode aggregate with at least one GPR free: describe as many
	 SImode word pieces as fit in the remaining registers.  */
      int k;
      int size = int_size_in_bytes (type);
      int no_units = ((size - 1) / 4) + 1;
      int max_no_words = GP_ARG_NUM_REG - align_words;
      int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
      rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));

      memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));

      for (k=0; k < rtlvec_len; k++)
	rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
				       gen_rtx_REG (SImode,
						    GP_ARG_MIN_REG
						    + align_words + k),
				       k == 0 ? const0_rtx : GEN_INT (k*4));

      /* K equals RTLVEC_LEN here, so the whole vector is used.  */
      return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
  }

  /* DImode falls through to here when exactly out of registers.  */
  return NULL_RTX;
}
4361
4362/* Determine where to put an argument to a function.
4363   Value is zero to push the argument on the stack,
4364   or a hard register in which to store the argument.
4365
4366   MODE is the argument's machine mode.
4367   TYPE is the data type of the argument (as a tree).
4368    This is null for libcalls where that information may
4369    not be available.
4370   CUM is a variable of type CUMULATIVE_ARGS which gives info about
4371    the preceding args and about the function being called.
4372   NAMED is nonzero if this argument is a named parameter
4373    (otherwise it is an extra parameter matching an ellipsis).
4374
4375   On RS/6000 the first eight words of non-FP are normally in registers
4376   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
4377   Under V.4, the first 8 FP args are in registers.
4378
4379   If this is floating-point and no prototype is specified, we use
4380   both an FP and integer register (or possibly FP reg and stack).  Library
4381   functions (when CALL_LIBCALL is set) always have the proper types for args,
4382   so we can pass the FP value just in one register.  emit_library_function
4383   doesn't support PARALLEL anyway.
4384
4385   Note that for args passed by reference, function_arg will be called
4386   with MODE and TYPE set to that of the pointer to the arg, not the arg
4387   itself.  */
4388
struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
	      tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
       /* Vector parameters get passed in vector register
          and also in GPRs or memory, in absence of prototype.  */
       int align_words;
       rtx slot;
       /* GPR slot for a vector starts on an even word.  */
       align_words = (cum->words + 1) & ~1;

       if (align_words >= GP_ARG_NUM_REG)
         {
           /* NULL_RTX first element means the GPR copy is in memory.  */
           slot = NULL_RTX;
         }
       else
         {
           slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
         }
       return gen_rtx_PARALLEL (mode,
                gen_rtvec (2,
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              slot, const0_rtx),
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              gen_rtx_REG (mode, cum->vregno),
                                              const0_rtx)));
      }
    else
      /* Prototyped (or 32-bit) named vector arg: just the VR.  */
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named || abi == ABI_V4)
	return NULL_RTX;
      else
	{
	  /* Vector parameters to varargs functions under AIX or Darwin
	     get passed in memory and possibly also in GPRs.  */
	  int align, align_words;
	  enum machine_mode part_mode = mode;

	  /* Vector parameters must be 16-byte aligned.  This places them at
	     2 mod 4 in terms of words in 32-bit mode, since the parameter
	     save area starts at offset 24 from the stack.  In 64-bit mode,
	     they just have to start on an even word, since the parameter
	     save area is 16-byte aligned.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  align_words = cum->words + align;

	  /* Out of registers?  Memory, then.  */
	  if (align_words >= GP_ARG_NUM_REG)
	    return NULL_RTX;

	  /* The vector value goes in GPRs.  Only the part of the
	     value in GPRs is reported here.  */
	  if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
	      > GP_ARG_NUM_REG)
	    /* Fortunately, there are only two possibilities, the value
	       is either wholly in GPRs or half in GPRs and half not.  */
	    part_mode = DImode;

	  return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
	}
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return rs6000_spe_function_arg (cum, mode, type);
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL_RTX;
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL_RTX;
	}
    }
  else
    {
      /* AIX and Darwin ABIs.  */
      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
      int align_words = cum->words + (cum->words & align);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
	{
	  /* FPR.[1] is built first; fpr[0] may later hold the GPR/stack
	     piece so the PARALLEL lists it first.  */
	  rtx fpr[2];
	  rtx *r;
	  bool needs_psave;
	  enum machine_mode fmode = mode;
	  int n;
	  unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

	  if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
	    {
	      /* Long double split over regs and memory.  */
	      if (fmode == TFmode)
		fmode = DFmode;

	      /* Currently, we only ever need one reg here because complex
		 doubles are split.  */
	      if (cum->fregno != FP_ARG_MAX_REG - 1)
		abort ();
	    }
	  fpr[1] = gen_rtx_REG (fmode, cum->fregno);

	  /* Do we also need to pass this arg in the parameter save
	     area?  */
	  needs_psave = (type
			 && (cum->nargs_prototype <= 0
			     || (DEFAULT_ABI == ABI_AIX
				 && TARGET_XL_CALL
				 && align_words >= GP_ARG_NUM_REG)));

	  if (!needs_psave && mode == fmode)
	    return fpr[1];

          if (TARGET_32BIT && TARGET_POWERPC64
              && mode == DFmode && cum->stdarg)
            return rs6000_mixed_function_arg (cum, mode, type, align_words);

	  /* Describe where this piece goes.  */
	  r = fpr + 1;
	  *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
	  n = 1;

	  if (needs_psave)
	    {
	      /* Now describe the part that goes in gprs or the stack.
		 This piece must come first, before the fprs.  */
	      rtx reg = NULL_RTX;
	      if (align_words < GP_ARG_NUM_REG)
		{
		  unsigned long n_words = rs6000_arg_size (mode, type);
		  enum machine_mode rmode = mode;

		  if (align_words + n_words > GP_ARG_NUM_REG)
		    /* If this is partially on the stack, then we only
		       include the portion actually in registers here.
		       We know this can only be one register because
		       complex doubles are split.  */
		    rmode = Pmode;
		  reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
		}
	      *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
	      ++n;
	    }

	  return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
	}
      else if (align_words < GP_ARG_NUM_REG)
	{
	  if (TARGET_32BIT && TARGET_POWERPC64
	      && (mode == DImode || mode == BLKmode))
	    return rs6000_mixed_function_arg (cum, mode, type, align_words);

	  return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
	}
      else
	return NULL_RTX;
    }
}
4596
4597/* For an arg passed partly in registers and partly in memory,
4598   this is the number of registers used.
4599   For args passed entirely in registers or entirely in memory, zero.  */
4600
int
function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			    tree type, int named)
{
  int ret = 0;

  /* V.4 never splits an argument between registers and memory.  */
  if (DEFAULT_ABI == ABI_V4)
    return 0;

  /* A prototyped AltiVec argument lives wholly in a vector register.  */
  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  if (USE_FP_FOR_ARG_P (cum, mode, type))
    {
      /* FP value split between the remaining FPRs and memory.
	 NOTE(review): FP_ARG_MAX_REG - cum->fregno excludes the last
	 FPR itself; FP_ARG_MAX_REG + 1 - cum->fregno would count it.
	 Confirm against the FP-arg code in function_arg.  */
      if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
	ret = FP_ARG_MAX_REG - cum->fregno;
      else if (cum->nargs_prototype >= 0)
	return 0;
    }

  /* Argument straddles the end of the GPR area: report the registers.  */
  if (cum->words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
    ret = GP_ARG_NUM_REG - cum->words;

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);

  return ret;
}
4631
4632/* A C expression that indicates when an argument must be passed by
4633   reference.  If nonzero for an argument, a copy of that argument is
4634   made in memory and a pointer to the argument is passed instead of
4635   the argument itself.  The pointer is passed in whatever way is
4636   appropriate for passing a pointer to that type.
4637
4638   Under V.4, aggregates and long double are passed by reference.
4639
4640   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
4641   reference unless the AltiVec vector extension ABI is in force.
4642
4643   As an extension to all ABIs, variable sized types are passed by
4644   reference.  */
4645
4646int
4647function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4648				enum machine_mode mode ATTRIBUTE_UNUSED,
4649				tree type, int named ATTRIBUTE_UNUSED)
4650{
4651  if ((DEFAULT_ABI == ABI_V4
4652       && ((type && AGGREGATE_TYPE_P (type))
4653	   || mode == TFmode))
4654      || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4655      || (type && int_size_in_bytes (type) < 0))
4656    {
4657      if (TARGET_DEBUG_ARG)
4658	fprintf (stderr, "function_arg_pass_by_reference\n");
4659
4660      return 1;
4661    }
4662  return 0;
4663}
4664
4665static void
4666rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4667{
4668  int i;
4669  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4670
4671  if (nregs == 0)
4672    return;
4673
4674    for (i = 0; i < nregs; i++)
4675    {
4676      rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4677      if (reload_completed)
4678      {
4679	if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4680	  tem = NULL_RTX;
4681	else
4682	  tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4683				     i * GET_MODE_SIZE(reg_mode));
4684      }
4685      else
4686	tem = replace_equiv_address (tem, XEXP (tem, 0));
4687
4688      if (tem == NULL_RTX)
4689        abort ();
4690
4691      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4692    }
4693}
4694
4695
4696/* Perform any needed actions needed for a function that is receiving a
4697   variable number of arguments.
4698
4699   CUM is as above.
4700
4701   MODE and TYPE are the mode and type of the current parameter.
4702
4703   PRETEND_SIZE is a variable that should be set to the amount of stack
4704   that must be pushed by the prolog to pretend that our caller pushed
4705   it.
4706
4707   Normally, this macro will push all remaining incoming registers on the
4708   stack and set PRETEND_SIZE to the length of the registers pushed.  */
4709
4710static void
4711setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4712		tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4713{
4714  CUMULATIVE_ARGS next_cum;
4715  int reg_size = TARGET_32BIT ? 4 : 8;
4716  rtx save_area = NULL_RTX, mem;
4717  int first_reg_offset, set;
4718
4719  /* Skip the last named argument.  */
4720  next_cum = *cum;
4721  function_arg_advance (&next_cum, mode, type, 1);
4722
4723  if (DEFAULT_ABI == ABI_V4)
4724    {
4725      /* Indicate to allocate space on the stack for varargs save area.  */
4726      cfun->machine->sysv_varargs_p = 1;
4727      if (! no_rtl)
4728	save_area = plus_constant (virtual_stack_vars_rtx,
4729				   - RS6000_VARARGS_SIZE);
4730
4731      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4732    }
4733  else
4734    {
4735      first_reg_offset = next_cum.words;
4736      save_area = virtual_incoming_args_rtx;
4737      cfun->machine->sysv_varargs_p = 0;
4738
4739      if (MUST_PASS_IN_STACK (mode, type))
4740	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4741    }
4742
4743  set = get_varargs_alias_set ();
4744  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4745    {
4746      mem = gen_rtx_MEM (BLKmode,
4747		         plus_constant (save_area,
4748					first_reg_offset * reg_size)),
4749      set_mem_alias_set (mem, set);
4750      set_mem_align (mem, BITS_PER_WORD);
4751
4752      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4753			          GP_ARG_NUM_REG - first_reg_offset);
4754    }
4755
4756  /* Save FP registers if needed.  */
4757  if (DEFAULT_ABI == ABI_V4
4758      && TARGET_HARD_FLOAT && TARGET_FPRS
4759      && ! no_rtl
4760      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4761    {
4762      int fregno = next_cum.fregno;
4763      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4764      rtx lab = gen_label_rtx ();
4765      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4766
4767      emit_jump_insn (gen_rtx_SET (VOIDmode,
4768				   pc_rtx,
4769				   gen_rtx_IF_THEN_ELSE (VOIDmode,
4770					    gen_rtx_NE (VOIDmode, cr1,
4771						        const0_rtx),
4772					    gen_rtx_LABEL_REF (VOIDmode, lab),
4773					    pc_rtx)));
4774
4775      while (fregno <= FP_ARG_V4_MAX_REG)
4776	{
4777	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4778          set_mem_alias_set (mem, set);
4779	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4780	  fregno++;
4781	  off += 8;
4782	}
4783
4784      emit_label (lab);
4785    }
4786}
4787
4788/* Create the va_list data type.  */
4789
static tree
rs6000_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* For V.4, build a __va_list_tag record with register-use counters,
     named padding, and the two save-area pointers.  */
  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* gpr/fpr count how many GP/FP argument registers have been used.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
     every user file.  */
  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
		      short_unsigned_type_node);
  /* overflow_arg_area tracks stack-passed arguments; reg_save_area
     points at the register spill area laid down in the prologue.  */
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_res) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields onto the record in declaration order.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_res;
  TREE_CHAIN (f_res) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
4835
4836/* Implement va_start.  */
4837
void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Pull the five fields out of the __va_list_tag record built by
     rs6000_build_builtin_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Initialize the register-use counters.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
4899
4900/* Implement va_arg.  */
4901
rtx
rs6000_va_arg (tree valist, tree type)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;
  int align;

  /* Non-V.4 ABIs mostly use the generic expander; only by-reference
     and split-complex cases need special handling.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference, as are AltiVec
	 vectors when 32-bit and not using the AltiVec ABI extension.  */
      if (int_size_in_bytes (type) < 0
	  || (TARGET_32BIT
	      && !TARGET_ALTIVEC_ABI
	      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Load the pointer and let the caller dereference it.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      if (targetm.calls.split_complex_arg
	  && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      rtx real_part, imag_part, dest_real, rr;

	      /* Fetch the two halves with recursive va_arg calls.  */
	      real_part = rs6000_va_arg (valist, elem_type);
	      imag_part = rs6000_va_arg (valist, elem_type);

	      /* We're not returning the value here, but the address.
		 real_part and imag_part are not contiguous, and we know
		 there is space available to pack real_part next to
		 imag_part.  float _Complex is not promoted to
		 double _Complex by the default promotion rules that
		 promote float to double.  */
	      if (2 * elem_size > UNITS_PER_WORD)
		abort ();

	      real_part = gen_rtx_MEM (elem_mode, real_part);
	      imag_part = gen_rtx_MEM (elem_mode, imag_part);

	      /* Copy the real part into the slot just below the
		 imaginary part, making the two halves contiguous.  */
	      dest_real = adjust_address (imag_part, elem_mode, -elem_size);
	      rr = gen_reg_rtx (elem_mode);
	      emit_move_insn (rr, real_part);
	      emit_move_insn (dest_real, rr);

	      return XEXP (dest_real, 0);
	    }
	}

      return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: pull the five fields out of the __va_list_tag record.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* rsize is the size rounded up to 4-byte words.  */
  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;
  align = 1;

  if (AGGREGATE_TYPE_P (type)
      || TYPE_MODE (type) == TFmode
      || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
    {
      /* Aggregates, long doubles, and AltiVec vectors are passed by
	 reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = 4;
      rsize = 1;
    }
  else if (TARGET_HARD_FLOAT && TARGET_FPRS
	   && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
      if (TYPE_MODE (type) == DFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL_RTX;
  addr_rtx = gen_reg_rtx (Pmode);

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2)
	{
	  /* Round the register counter up to an even register pair.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	}

      /* If not enough registers remain, jump to the stack path.  */
      emit_cmp_and_jump_insns
	(expand_expr (u, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Compute sav + sav_ofs + reg++ * sav_scale into addr_rtx.  */
      t = sav;
      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();

      emit_label (lab_false);
      if (n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of the n_reg == 2 case.  */
	  t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
	  TREE_SIDE_EFFECTS (t) = 1;
	  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past the argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (lab_over)
    emit_label (lab_over);

  /* For by-reference arguments, load the real address through the
     pointer we just computed.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
5124
5125/* Builtins.  */
5126
/* Register builtin NAME with function type TYPE and expansion code
   CODE, but only when the target_flags bits in MASK are enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
5133
5134/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
5135
/* Each entry gives the enabling target_flags mask, the insn code used
   to expand the builtin, its user-visible name, and its builtin enum.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
5162
5163/* DST operations: void foo (void *, const int, const char).  */
5164
/* Data-stream-touch builtins; same entry layout as bdesc_3arg.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
5172
5173/* Simple binary operations: VECc = foo (VECa, VECb).  */
5174
5175static struct builtin_description bdesc_2arg[] =
5176{
5177  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5178  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5179  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5180  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5181  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5182  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5183  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5184  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5185  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5186  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5187  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5188  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5189  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5190  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5191  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5192  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5193  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5194  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5195  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5196  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5197  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5198  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5199  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5200  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5201  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5202  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5203  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5204  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5205  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5206  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5207  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5208  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5209  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5210  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5211  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5212  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5213  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5214  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5215  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5216  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5217  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5218  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5219  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5220  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5221  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5222  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5223  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5224  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5225  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5226  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5227  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5228  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5229  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5230  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5231  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5232  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5233  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5234  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5235  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5236  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5237  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5238  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5239  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5240  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5241  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5242  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5243  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5244  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5245  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5246  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5247  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5248  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5249  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5250  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5251  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5252  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5253  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5254  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5255  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5256  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5257  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5258  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5259  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5260  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5261  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5262  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5263  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5264  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5265  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5266  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5267  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5268  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5269  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5270  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5271  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5272  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5273  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5274  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5275  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5276  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5277  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5278  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5279  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5280  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5281  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5282  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5283  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5284  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5285  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5286  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5287  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5288  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5289  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5290
5291  /* Place holder, leave as first spe builtin.  */
5292  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5293  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5294  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5295  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5296  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5297  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5298  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5299  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5300  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5301  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5302  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5303  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5304  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5305  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5306  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5307  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5308  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5309  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5310  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5311  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5312  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5313  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5314  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5315  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5316  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5317  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5318  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5319  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5320  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5321  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5322  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5323  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5324  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5325  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5326  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5327  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5328  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5329  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5330  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5331  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5332  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5333  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5334  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5335  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5336  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5337  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5338  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5339  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5340  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5341  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5342  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5343  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5344  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5345  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5346  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5347  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5348  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5349  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5350  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5351  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5352  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5353  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5354  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5355  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5356  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5357  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5358  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5359  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5360  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5361  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5362  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5363  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5364  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5365  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5366  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5367  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5368  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5369  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5370  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5371  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5372  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5373  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5374  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5375  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5376  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5377  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5378  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5379  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5380  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5381  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5382  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5383  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5384  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5385  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5386  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5387  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5388  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5389  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5390  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5391  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5392  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5393  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5394  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5395  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5396  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5397  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5398  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5399  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5400  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5401
5402  /* SPE binary operations expecting a 5-bit unsigned literal.  */
5403  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5404
5405  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5406  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5407  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5408  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5409  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5410  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5411  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5412  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5413  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5414  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5415  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5416  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5417  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5418  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5419  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5420  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5421  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5422  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5423  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5424  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5425  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5426  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5427  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5428  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5429  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5430  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5431
5432  /* Place-holder.  Leave as last binary SPE builtin.  */
5433  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5434};
5435
5436/* AltiVec predicates.  */
5437
struct builtin_description_predicates
{
  /* Target flag mask (e.g. MASK_ALTIVEC) that must be enabled for
     this builtin to be available.  */
  const unsigned int mask;
  /* Insn pattern that performs the underlying comparison.  */
  const enum insn_code icode;
  /* Assembler opcode string; handed to the predicate pattern wrapped
     in a SYMBOL_REF (see altivec_expand_predicate_builtin).  */
  const char *opcode;
  /* The __builtin_* name exposed to users.  */
  const char *const name;
  /* The rs6000 builtin function code.  */
  const enum rs6000_builtins code;
};
5446
/* Table of AltiVec predicate builtins.  Each entry maps a record-form
   vector compare ("*vcmp...") onto a __builtin_altivec_*_p name; the
   opcode string is passed through to the predicate insn pattern.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
5463
5464/* SPE predicates.  */
/* Table of SPE predicate builtins.  The place-holder comments are
   load-bearing: code elsewhere iterates from the first to the last
   entry, so keep those entries in place.  All SPE entries use a zero
   mask (no MASK_* gate here).  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
5481
5482/* SPE evsel predicates.  */
/* Table of SPE evsel builtins.  Each entry pairs a compare pattern
   with a __builtin_spe_evsel_* name; presumably the expander combines
   the compare with an evsel — confirm against the evsel expansion
   code.  As with bdesc_spe_predicates, the first/last place-holders
   mark the iteration bounds and must stay where they are.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
5499
5500/* ABS* operations.  */
5501
/* Table of absolute-value builtins: maps __builtin_altivec_abs*_* to
   the abs insn patterns.  These are expanded via
   altivec_expand_abs_builtin, whose patterns take two scratch
   operands in addition to destination and source.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
5512
5513/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5514   foo (VECa).  */
5515
static struct builtin_description bdesc_1arg[] =
{
  /* AltiVec unary builtins.  The vspltis* entries take a 5-bit signed
     literal rather than a vector operand; rs6000_expand_unop_builtin
     range-checks them.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
5570
5571static rtx
5572rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5573{
5574  rtx pat;
5575  tree arg0 = TREE_VALUE (arglist);
5576  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5577  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5578  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5579
5580  if (icode == CODE_FOR_nothing)
5581    /* Builtin not supported on this processor.  */
5582    return 0;
5583
5584  /* If we got invalid arguments bail out before generating bad rtl.  */
5585  if (arg0 == error_mark_node)
5586    return const0_rtx;
5587
5588  if (icode == CODE_FOR_altivec_vspltisb
5589      || icode == CODE_FOR_altivec_vspltish
5590      || icode == CODE_FOR_altivec_vspltisw
5591      || icode == CODE_FOR_spe_evsplatfi
5592      || icode == CODE_FOR_spe_evsplati)
5593    {
5594      /* Only allow 5-bit *signed* literals.  */
5595      if (GET_CODE (op0) != CONST_INT
5596	  || INTVAL (op0) > 0x1f
5597	  || INTVAL (op0) < -0x1f)
5598	{
5599	  error ("argument 1 must be a 5-bit signed literal");
5600	  return const0_rtx;
5601	}
5602    }
5603
5604  if (target == 0
5605      || GET_MODE (target) != tmode
5606      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5607    target = gen_reg_rtx (tmode);
5608
5609  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5610    op0 = copy_to_mode_reg (mode0, op0);
5611
5612  pat = GEN_FCN (icode) (target, op0);
5613  if (! pat)
5614    return 0;
5615  emit_insn (pat);
5616
5617  return target;
5618}
5619
5620static rtx
5621altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5622{
5623  rtx pat, scratch1, scratch2;
5624  tree arg0 = TREE_VALUE (arglist);
5625  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5626  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5627  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5628
5629  /* If we have invalid arguments, bail out before generating bad rtl.  */
5630  if (arg0 == error_mark_node)
5631    return const0_rtx;
5632
5633  if (target == 0
5634      || GET_MODE (target) != tmode
5635      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5636    target = gen_reg_rtx (tmode);
5637
5638  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5639    op0 = copy_to_mode_reg (mode0, op0);
5640
5641  scratch1 = gen_reg_rtx (mode0);
5642  scratch2 = gen_reg_rtx (mode0);
5643
5644  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5645  if (! pat)
5646    return 0;
5647  emit_insn (pat);
5648
5649  return target;
5650}
5651
/* Expand a binary builtin: TARGET = ICODE (ARG0, ARG1).  Return the
   rtx holding the result, 0 if the builtin is not supported on this
   processor or the pattern fails to generate, or const0_rtx after
   diagnosing invalid arguments.  */
static rtx
rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  /* These patterns take their second operand as an immediate, so
     reject anything that is not a small literal up front.  */
  if (icode == CODE_FOR_altivec_vcfux
      || icode == CODE_FOR_altivec_vcfsx
      || icode == CODE_FOR_altivec_vctsxs
      || icode == CODE_FOR_altivec_vctuxs
      || icode == CODE_FOR_altivec_vspltb
      || icode == CODE_FOR_altivec_vsplth
      || icode == CODE_FOR_altivec_vspltw
      || icode == CODE_FOR_spe_evaddiw
      || icode == CODE_FOR_spe_evldd
      || icode == CODE_FOR_spe_evldh
      || icode == CODE_FOR_spe_evldw
      || icode == CODE_FOR_spe_evlhhesplat
      || icode == CODE_FOR_spe_evlhhossplat
      || icode == CODE_FOR_spe_evlhhousplat
      || icode == CODE_FOR_spe_evlwhe
      || icode == CODE_FOR_spe_evlwhos
      || icode == CODE_FOR_spe_evlwhou
      || icode == CODE_FOR_spe_evlwhsplat
      || icode == CODE_FOR_spe_evlwwsplat
      || icode == CODE_FOR_spe_evrlwi
      || icode == CODE_FOR_spe_evslwi
      || icode == CODE_FOR_spe_evsrwis
      || icode == CODE_FOR_spe_evsubifw
      || icode == CODE_FOR_spe_evsrwiu)
    {
      /* Only allow 5-bit unsigned literals.  */
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
    }

  /* Pick a destination of the right mode if the caller's suggestion
     does not fit.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Force each operand into a form the insn pattern accepts.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
5723
5724static rtx
5725altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5726				  tree arglist, rtx target)
5727{
5728  rtx pat, scratch;
5729  tree cr6_form = TREE_VALUE (arglist);
5730  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5731  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5732  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5733  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5734  enum machine_mode tmode = SImode;
5735  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5736  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5737  int cr6_form_int;
5738
5739  if (TREE_CODE (cr6_form) != INTEGER_CST)
5740    {
5741      error ("argument 1 of __builtin_altivec_predicate must be a constant");
5742      return const0_rtx;
5743    }
5744  else
5745    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5746
5747  if (mode0 != mode1)
5748    abort ();
5749
5750  /* If we have invalid arguments, bail out before generating bad rtl.  */
5751  if (arg0 == error_mark_node || arg1 == error_mark_node)
5752    return const0_rtx;
5753
5754  if (target == 0
5755      || GET_MODE (target) != tmode
5756      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5757    target = gen_reg_rtx (tmode);
5758
5759  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5760    op0 = copy_to_mode_reg (mode0, op0);
5761  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5762    op1 = copy_to_mode_reg (mode1, op1);
5763
5764  scratch = gen_reg_rtx (mode0);
5765
5766  pat = GEN_FCN (icode) (scratch, op0, op1,
5767			 gen_rtx (SYMBOL_REF, Pmode, opcode));
5768  if (! pat)
5769    return 0;
5770  emit_insn (pat);
5771
5772  /* The vec_any* and vec_all* predicates use the same opcodes for two
5773     different operations, but the bits in CR6 will be different
5774     depending on what information we want.  So we have to play tricks
5775     with CR6 to get the right bits out.
5776
5777     If you think this is disgusting, look at the specs for the
5778     AltiVec predicates.  */
5779
5780     switch (cr6_form_int)
5781       {
5782       case 0:
5783	 emit_insn (gen_cr6_test_for_zero (target));
5784	 break;
5785       case 1:
5786	 emit_insn (gen_cr6_test_for_zero_reverse (target));
5787	 break;
5788       case 2:
5789	 emit_insn (gen_cr6_test_for_lt (target));
5790	 break;
5791       case 3:
5792	 emit_insn (gen_cr6_test_for_lt_reverse (target));
5793	 break;
5794       default:
5795	 error ("argument 1 of __builtin_altivec_predicate is out of range");
5796	 break;
5797       }
5798
5799  return target;
5800}
5801
5802static rtx
5803altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5804{
5805  rtx pat, addr;
5806  tree arg0 = TREE_VALUE (arglist);
5807  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5808  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5809  enum machine_mode mode0 = Pmode;
5810  enum machine_mode mode1 = Pmode;
5811  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5812  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5813
5814  if (icode == CODE_FOR_nothing)
5815    /* Builtin not supported on this processor.  */
5816    return 0;
5817
5818  /* If we got invalid arguments bail out before generating bad rtl.  */
5819  if (arg0 == error_mark_node || arg1 == error_mark_node)
5820    return const0_rtx;
5821
5822  if (target == 0
5823      || GET_MODE (target) != tmode
5824      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5825    target = gen_reg_rtx (tmode);
5826
5827  op1 = copy_to_mode_reg (mode1, op1);
5828
5829  if (op0 == const0_rtx)
5830    {
5831      addr = gen_rtx_MEM (tmode, op1);
5832    }
5833  else
5834    {
5835      op0 = copy_to_mode_reg (mode0, op0);
5836      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5837    }
5838
5839  pat = GEN_FCN (icode) (target, addr);
5840
5841  if (! pat)
5842    return 0;
5843  emit_insn (pat);
5844
5845  return target;
5846}
5847
/* Expand an SPE store builtin.  ARG0 is the value to store; ARG1 and
   ARG2 supply the pattern's first two operands.  Note the operand
   permutation: ARG0 is matched against the pattern's operand 2 and
   emitted last, while ARG1 and ARG2 map to operands 0 and 1.  Always
   returns NULL_RTX (no value), or const0_rtx on invalid arguments.  */
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* The stored value (op0) goes in operand 2; the other two arguments
     shift down to operands 0 and 1.  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
5880
5881static rtx
5882altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5883{
5884  tree arg0 = TREE_VALUE (arglist);
5885  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5886  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5887  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5888  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5889  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5890  rtx pat, addr;
5891  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5892  enum machine_mode mode1 = Pmode;
5893  enum machine_mode mode2 = Pmode;
5894
5895  /* Invalid arguments.  Bail before doing anything stoopid!  */
5896  if (arg0 == error_mark_node
5897      || arg1 == error_mark_node
5898      || arg2 == error_mark_node)
5899    return const0_rtx;
5900
5901  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5902    op0 = copy_to_mode_reg (tmode, op0);
5903
5904  op2 = copy_to_mode_reg (mode2, op2);
5905
5906  if (op1 == const0_rtx)
5907    {
5908      addr = gen_rtx_MEM (tmode, op2);
5909    }
5910  else
5911    {
5912      op1 = copy_to_mode_reg (mode1, op1);
5913      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5914    }
5915
5916  pat = GEN_FCN (icode) (addr, op0);
5917  if (pat)
5918    emit_insn (pat);
5919  return NULL_RTX;
5920}
5921
5922static rtx
5923rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5924{
5925  rtx pat;
5926  tree arg0 = TREE_VALUE (arglist);
5927  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5928  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5929  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5930  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5931  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5932  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5933  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5934  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5935  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5936
5937  if (icode == CODE_FOR_nothing)
5938    /* Builtin not supported on this processor.  */
5939    return 0;
5940
5941  /* If we got invalid arguments bail out before generating bad rtl.  */
5942  if (arg0 == error_mark_node
5943      || arg1 == error_mark_node
5944      || arg2 == error_mark_node)
5945    return const0_rtx;
5946
5947  if (icode == CODE_FOR_altivec_vsldoi_4sf
5948      || icode == CODE_FOR_altivec_vsldoi_4si
5949      || icode == CODE_FOR_altivec_vsldoi_8hi
5950      || icode == CODE_FOR_altivec_vsldoi_16qi)
5951    {
5952      /* Only allow 4-bit unsigned literals.  */
5953      if (TREE_CODE (arg2) != INTEGER_CST
5954	  || TREE_INT_CST_LOW (arg2) & ~0xf)
5955	{
5956	  error ("argument 3 must be a 4-bit unsigned literal");
5957	  return const0_rtx;
5958	}
5959    }
5960
5961  if (target == 0
5962      || GET_MODE (target) != tmode
5963      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5964    target = gen_reg_rtx (tmode);
5965
5966  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5967    op0 = copy_to_mode_reg (mode0, op0);
5968  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5969    op1 = copy_to_mode_reg (mode1, op1);
5970  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5971    op2 = copy_to_mode_reg (mode2, op2);
5972
5973  pat = GEN_FCN (icode) (target, op0, op1, op2);
5974  if (! pat)
5975    return 0;
5976  emit_insn (pat);
5977
5978  return target;
5979}
5980
5981/* Expand the lvx builtins.  */
5982static rtx
5983altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5984{
5985  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5986  tree arglist = TREE_OPERAND (exp, 1);
5987  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5988  tree arg0;
5989  enum machine_mode tmode, mode0;
5990  rtx pat, op0;
5991  enum insn_code icode;
5992
5993  switch (fcode)
5994    {
5995    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5996      icode = CODE_FOR_altivec_lvx_16qi;
5997      break;
5998    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5999      icode = CODE_FOR_altivec_lvx_8hi;
6000      break;
6001    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6002      icode = CODE_FOR_altivec_lvx_4si;
6003      break;
6004    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6005      icode = CODE_FOR_altivec_lvx_4sf;
6006      break;
6007    default:
6008      *expandedp = false;
6009      return NULL_RTX;
6010    }
6011
6012  *expandedp = true;
6013
6014  arg0 = TREE_VALUE (arglist);
6015  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6016  tmode = insn_data[icode].operand[0].mode;
6017  mode0 = insn_data[icode].operand[1].mode;
6018
6019  if (target == 0
6020      || GET_MODE (target) != tmode
6021      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6022    target = gen_reg_rtx (tmode);
6023
6024  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6025    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6026
6027  pat = GEN_FCN (icode) (target, op0);
6028  if (! pat)
6029    return 0;
6030  emit_insn (pat);
6031  return target;
6032}
6033
6034/* Expand the stvx builtins.  */
6035static rtx
6036altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6037			   bool *expandedp)
6038{
6039  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6040  tree arglist = TREE_OPERAND (exp, 1);
6041  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6042  tree arg0, arg1;
6043  enum machine_mode mode0, mode1;
6044  rtx pat, op0, op1;
6045  enum insn_code icode;
6046
6047  switch (fcode)
6048    {
6049    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6050      icode = CODE_FOR_altivec_stvx_16qi;
6051      break;
6052    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6053      icode = CODE_FOR_altivec_stvx_8hi;
6054      break;
6055    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6056      icode = CODE_FOR_altivec_stvx_4si;
6057      break;
6058    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6059      icode = CODE_FOR_altivec_stvx_4sf;
6060      break;
6061    default:
6062      *expandedp = false;
6063      return NULL_RTX;
6064    }
6065
6066  arg0 = TREE_VALUE (arglist);
6067  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6068  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6069  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6070  mode0 = insn_data[icode].operand[0].mode;
6071  mode1 = insn_data[icode].operand[1].mode;
6072
6073  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6074    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6075  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6076    op1 = copy_to_mode_reg (mode1, op1);
6077
6078  pat = GEN_FCN (icode) (op0, op1);
6079  if (pat)
6080    emit_insn (pat);
6081
6082  *expandedp = true;
6083  return NULL_RTX;
6084}
6085
/* Expand the dst builtins.  EXP is the CALL_EXPR for the builtin.
   If FCODE matches one of the entries in bdesc_dst, emit the
   corresponding insn and set *EXPANDEDP; these builtins return no
   value, so NULL_RTX is returned in every case.  TARGET is unused.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
			    bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  Linear scan of the description table; the
     loop body runs at most once, for the matching entry.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	/* The third argument must be a 2-bit compile-time constant;
	   it goes into the insn verbatim as op2 below.  */
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	/* If operand 0 fails its predicate, treat it as an address
	   and wrap it in a MEM.  */
	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
6145
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatch order: first the lvx/stvx/dst helper expanders, then the
   irregular builtins handled by the switch below, then the abs and
   predicate description tables, and finally the lv* loads.  If
   nothing matches, *EXPANDEDP is set to false.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume we will find a match; reset to false at the bottom if
     nothing handled FCODE.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from the vector status register; no arguments.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to the vector status register; one argument, no result.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      /* The stream selector must be a 2-bit compile-time constant.  */
      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Nothing matched; tell the caller this was not an AltiVec builtin.  */
  *expandedp = false;
  return NULL_RTX;
}
6294
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.

   Fields are { mask, icode, name, code }; the mask starts out zero
   here and is filled in at runtime (see enable_mask_for_builtins,
   which writes target_flags into the mask of enabled entries).  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
6322
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  Dispatch order: immediate-operand validation, the
   evsplat*i special cases, the 2-arg/predicate/evsel description
   tables, then the irregular stores and SPEFSCR moves.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume we will find a match; reset to false at the bottom if
     nothing handled FCODE.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument (offset) must be a 5-bit literal.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* The loads above in bdesc_2arg_spe expand like ordinary binops.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Read the SPEFSCR; no arguments, result in TARGET.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write the SPEFSCR; one argument, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Nothing matched; tell the caller this was not an SPE builtin.  */
  *expandedp = false;
  return NULL_RTX;
}
6460
/* Expand one of the __builtin_spe_predicate builtins.  ICODE is the
   compare insn; ARGLIST is (form, a, b) where FORM is a literal 0-3
   selecting which result flavor ("all", "any", "upper", "lower") to
   extract from the condition register.  Returns the SImode result in
   TARGET, or const0_rtx after a diagnosed usage error.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* The two compare operands are expected to share a mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  /* The result is always a plain SImode flag.  */
  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Emit a single compare whose CC result feeds all four variants.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Extract the requested CR bit as a 0/1 value in TARGET.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
6559
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

/* Expand one evsel builtin.  ICODE is the compare insn for *OP*;
   ARGLIST is (a, b, c, d).  A and B feed the compare, C and D feed
   the select.  Returns the result register, or const0_rtx on
   erroneous arguments.  */
static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* The compare insn's two input operands must share a mode; all
     four arguments are then forced into that mode below.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 based on the compare result; the
     floating-point variant uses a distinct pattern.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
6620
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Tries the AltiVec and SPE special-case expanders first, then the
   shared unary/binary/ternary description tables.  Aborts if EXP
   does not name any known builtin.  */
static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }

  if (TARGET_ALTIVEC || TARGET_SPE)
    {
      /* Handle simple unary operations.  */
      d = (struct builtin_description *) bdesc_1arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_unop_builtin (d->icode, arglist, target);

      /* Handle simple binary operations.  */
      d = (struct builtin_description *) bdesc_2arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_binop_builtin (d->icode, arglist, target);

      /* Handle simple ternary operations.  */
      d = (struct builtin_description *) bdesc_3arg;
      for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
    }

  /* An unrecognized builtin code indicates an internal error.  */
  abort ();
  return NULL_RTX;
}
6679
/* Register all rs6000 builtin functions for the selected target.
   Called once via the TARGET_INIT_BUILTINS hook.  */
static void
rs6000_init_builtins (void)
{
  /* Make distinct copies of the V2SI/V2SF type nodes to serve as the
     "opaque" SPE vector types (presumably so they compare unequal to
     the generic vector types -- see __ev64_opaque__ below).  */
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  /* The simple unary/binary builtins are shared by both ISAs.  */
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6694
6695/* Search through a set of builtins and enable the mask bits.
6696   DESC is an array of builtins.
6697   SIZE is the total number of builtins.
6698   START is the builtin enum at which to start.
6699   END is the builtin enum at which to end.  */
6700static void
6701enable_mask_for_builtins (struct builtin_description *desc, int size,
6702			  enum rs6000_builtins start,
6703			  enum rs6000_builtins end)
6704{
6705  int i;
6706
6707  for (i = 0; i < size; ++i)
6708    if (desc[i].code == start)
6709      break;
6710
6711  if (i == size)
6712    return;
6713
6714  for (; i < size; ++i)
6715    {
6716      /* Flip all the bits on.  */
6717      desc[i].mask = target_flags;
6718      if (desc[i].code == end)
6719	break;
6720    }
6721}
6722
6723static void
6724spe_init_builtins (void)
6725{
6726  tree endlink = void_list_node;
6727  tree puint_type_node = build_pointer_type (unsigned_type_node);
6728  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6729  struct builtin_description *d;
6730  size_t i;
6731
6732  tree v2si_ftype_4_v2si
6733    = build_function_type
6734    (opaque_V2SI_type_node,
6735     tree_cons (NULL_TREE, opaque_V2SI_type_node,
6736		tree_cons (NULL_TREE, opaque_V2SI_type_node,
6737			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6738				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
6739						 endlink)))));
6740
6741  tree v2sf_ftype_4_v2sf
6742    = build_function_type
6743    (opaque_V2SF_type_node,
6744     tree_cons (NULL_TREE, opaque_V2SF_type_node,
6745		tree_cons (NULL_TREE, opaque_V2SF_type_node,
6746			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
6747				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
6748						 endlink)))));
6749
6750  tree int_ftype_int_v2si_v2si
6751    = build_function_type
6752    (integer_type_node,
6753     tree_cons (NULL_TREE, integer_type_node,
6754		tree_cons (NULL_TREE, opaque_V2SI_type_node,
6755			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6756				      endlink))));
6757
6758  tree int_ftype_int_v2sf_v2sf
6759    = build_function_type
6760    (integer_type_node,
6761     tree_cons (NULL_TREE, integer_type_node,
6762		tree_cons (NULL_TREE, opaque_V2SF_type_node,
6763			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
6764				      endlink))));
6765
6766  tree void_ftype_v2si_puint_int
6767    = build_function_type (void_type_node,
6768			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6769				      tree_cons (NULL_TREE, puint_type_node,
6770						 tree_cons (NULL_TREE,
6771							    integer_type_node,
6772							    endlink))));
6773
6774  tree void_ftype_v2si_puint_char
6775    = build_function_type (void_type_node,
6776			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6777				      tree_cons (NULL_TREE, puint_type_node,
6778						 tree_cons (NULL_TREE,
6779							    char_type_node,
6780							    endlink))));
6781
6782  tree void_ftype_v2si_pv2si_int
6783    = build_function_type (void_type_node,
6784			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6785				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6786						 tree_cons (NULL_TREE,
6787							    integer_type_node,
6788							    endlink))));
6789
6790  tree void_ftype_v2si_pv2si_char
6791    = build_function_type (void_type_node,
6792			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6793				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6794						 tree_cons (NULL_TREE,
6795							    char_type_node,
6796							    endlink))));
6797
6798  tree void_ftype_int
6799    = build_function_type (void_type_node,
6800			   tree_cons (NULL_TREE, integer_type_node, endlink));
6801
6802  tree int_ftype_void
6803    = build_function_type (integer_type_node, endlink);
6804
6805  tree v2si_ftype_pv2si_int
6806    = build_function_type (opaque_V2SI_type_node,
6807			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6808				      tree_cons (NULL_TREE, integer_type_node,
6809						 endlink)));
6810
6811  tree v2si_ftype_puint_int
6812    = build_function_type (opaque_V2SI_type_node,
6813			   tree_cons (NULL_TREE, puint_type_node,
6814				      tree_cons (NULL_TREE, integer_type_node,
6815						 endlink)));
6816
6817  tree v2si_ftype_pushort_int
6818    = build_function_type (opaque_V2SI_type_node,
6819			   tree_cons (NULL_TREE, pushort_type_node,
6820				      tree_cons (NULL_TREE, integer_type_node,
6821						 endlink)));
6822
6823  tree v2si_ftype_signed_char
6824    = build_function_type (opaque_V2SI_type_node,
6825			   tree_cons (NULL_TREE, signed_char_type_node,
6826				      endlink));
6827
6828  /* The initialization of the simple binary and unary builtins is
6829     done in rs6000_common_init_builtins, but we have to enable the
6830     mask bits here manually because we have run out of `target_flags'
6831     bits.  We really need to redesign this mask business.  */
6832
6833  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6834			    ARRAY_SIZE (bdesc_2arg),
6835			    SPE_BUILTIN_EVADDW,
6836			    SPE_BUILTIN_EVXOR);
6837  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6838			    ARRAY_SIZE (bdesc_1arg),
6839			    SPE_BUILTIN_EVABS,
6840			    SPE_BUILTIN_EVSUBFUSIAAW);
6841  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6842			    ARRAY_SIZE (bdesc_spe_predicates),
6843			    SPE_BUILTIN_EVCMPEQ,
6844			    SPE_BUILTIN_EVFSTSTLT);
6845  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6846			    ARRAY_SIZE (bdesc_spe_evsel),
6847			    SPE_BUILTIN_EVSEL_CMPGTS,
6848			    SPE_BUILTIN_EVSEL_FSTSTEQ);
6849
6850  (*lang_hooks.decls.pushdecl)
6851    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6852		 opaque_V2SI_type_node));
6853
6854  /* Initialize irregular SPE builtins.  */
6855
6856  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6857  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6858  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6859  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6860  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6861  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6862  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6863  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6864  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6865  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6866  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6867  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6868  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6869  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6870  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6871  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6872  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6873  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6874
6875  /* Loads.  */
6876  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6877  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6878  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6879  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6880  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6881  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6882  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6883  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6884  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6885  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6886  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6887  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6888  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6889  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6890  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6891  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6892  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6893  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6894  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6895  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6896  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6897  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6898
6899  /* Predicates.  */
6900  d = (struct builtin_description *) bdesc_spe_predicates;
6901  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6902    {
6903      tree type;
6904
6905      switch (insn_data[d->icode].operand[1].mode)
6906	{
6907	case V2SImode:
6908	  type = int_ftype_int_v2si_v2si;
6909	  break;
6910	case V2SFmode:
6911	  type = int_ftype_int_v2sf_v2sf;
6912	  break;
6913	default:
6914	  abort ();
6915	}
6916
6917      def_builtin (d->mask, d->name, type, d->code);
6918    }
6919
6920  /* Evsel predicates.  */
6921  d = (struct builtin_description *) bdesc_spe_evsel;
6922  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6923    {
6924      tree type;
6925
6926      switch (insn_data[d->icode].operand[1].mode)
6927	{
6928	case V2SImode:
6929	  type = v2si_ftype_4_v2si;
6930	  break;
6931	case V2SFmode:
6932	  type = v2sf_ftype_4_v2sf;
6933	  break;
6934	default:
6935	  abort ();
6936	}
6937
6938      def_builtin (d->mask, d->name, type, d->code);
6939    }
6940}
6941
/* Register the AltiVec-specific builtin functions: the raw load/store
   "internal" accessors, the vscr/data-stream control builtins, the
   lv*/stv* element load/store builtins, the dst* variants, the
   compare predicates, and the abs* operators.  All are registered
   through def_builtin with MASK_ALTIVEC (or the descriptor table's
   own mask) so they are only available when AltiVec is enabled.  */
static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Plain pointer types for the store builtins' destination operands.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Pointer-to-const variants so the load builtins accept const
     source operands.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function type nodes shared by the builtin registrations below;
     the names encode return type followed by the argument types.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Raw load/store accessors for each vector mode, plus the vscr
     (vector status and control register) and data-stream builtins.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  All share the (pcvoid, int, char)
     signature; name/mask/code come from the descriptor table.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The function type is selected from
     the insn pattern's second operand mode (the first vector input;
     operand 0 is the integer result).  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  Here the output mode (operand 0)
     determines the unary v<mode>_ftype_v<mode> signature.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7140
/* Register the builtins shared between AltiVec and SPE: the simple
   ternary, binary and unary operators described in the bdesc_3arg,
   bdesc_2arg and bdesc_1arg tables.  For each table entry the
   function type is deduced from the operand modes of the entry's
   insn pattern, then the builtin is registered via def_builtin with
   the mask stored in the table (which spe_init_builtins may have
   adjusted beforehand).  */
static void
rs6000_common_init_builtins (void)
{
  struct builtin_description *d;
  size_t i;

  /* Function type nodes shared by the loops below; the names encode
     return type followed by the argument types.  "char" arguments
     stand for the small literal operands of the permute/shift/splat
     instructions.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  /* SPE types use the opaque V2SI/V2SF nodes so that the vector
     arguments interconvert without warnings.  */
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
 tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
 tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Skip entries disabled for this configuration.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      /* Operand 0 is the result; 1-3 are the three inputs.  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* vperm-style patterns: result and first two inputs agree,
	 third input is the V16QI permute control vector.  */
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
        {
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* Multiply-sum style patterns with a V4SI accumulator.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      /* Skip entries disabled for this configuration.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      /* Operand 0 is the result; 1 and 2 are the two inputs.  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      /* SPE combinations (V2SI results).  */
      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      /* Skip entries disabled for this configuration.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      /* Operand 0 is the result; 1 is the single input (a QImode
	 input marks a small-literal splat operand).  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
        type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
        type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
        type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7599
/* Register ABI-specific soft-float conversion and 128-bit long double
   (TFmode) arithmetic library routines.  Called once at startup; does
   nothing when hardware floating point is disabled, since the generic
   soft-float names then apply.  */

static void
rs6000_init_libfuncs (void)
{
  if (!TARGET_HARD_FLOAT)
    return;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Non-V4 ABIs: AIX, Darwin, and 64-bit SVR4.  */
      if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
	{
	  /* AIX library routines for float->int conversion.  */
	  set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
	  set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
	  set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
	  set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
	}

      /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines.  */
      set_optab_libfunc (add_optab, TFmode, "_xlqadd");
      set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
      set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
      set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
    }
  else
    {
      /* 32-bit SVR4 quad floating point routines.  */

      set_optab_libfunc (add_optab, TFmode, "_q_add");
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
      /* _q_sqrt is only available on CPUs with the fsqrt instruction
	 family (General Purpose group / POWER2).  */
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");

      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
      set_optab_libfunc (le_optab, TFmode, "_q_fle");

      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
    }
}
7651
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

/* Maximum number of scalar load/store pairs queued before the stores
   are emitted; batching the stores lets the loads overlap.  */
#define MAX_MOVE_REG 4

int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  rtx stores[MAX_MOVE_REG];	/* pending store insns, flushed in order */
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      /* Each iteration selects the widest move available for the
	 remaining bytes: either a string-move pattern (mode stays
	 BLKmode, moves several registers per insn) or a scalar
	 load/store pair in MODE.  */
      union {
	rtx (*movstrsi) (rtx, rtx, rtx, rtx);
	rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* The string-move alternatives need runs of call-clobbered GPRs
	 (r5 upward) that the backend hasn't fixed.  */
      if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movstrsi = gen_movstrsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movstrsi = gen_movstrsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movstrsi = gen_movstrsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movstrsi = gen_movstrsi_2reg;
	}
      else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movstrsi = gen_movstrsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
	{
	  /* Scalar move: load now, queue the store so several loads
	     can be in flight before any store is emitted.  */
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush queued stores when the queue is full, at the end of the
	 block, or before a string move (which does its own stores).  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movstrsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  /* lswi/stswi encode a count of 32 as 0, hence the mask.  */
	  emit_insn ((*gen_func.movstrsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
7824
7825
7826/* Return 1 if OP is a load multiple operation.  It is known to be a
7827   PARALLEL and the first section will be tested.  */
7828
7829int
7830load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7831{
7832  int count = XVECLEN (op, 0);
7833  unsigned int dest_regno;
7834  rtx src_addr;
7835  int i;
7836
7837  /* Perform a quick check so we don't blow up below.  */
7838  if (count <= 1
7839      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7840      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7841      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7842    return 0;
7843
7844  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7845  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7846
7847  for (i = 1; i < count; i++)
7848    {
7849      rtx elt = XVECEXP (op, 0, i);
7850
7851      if (GET_CODE (elt) != SET
7852	  || GET_CODE (SET_DEST (elt)) != REG
7853	  || GET_MODE (SET_DEST (elt)) != SImode
7854	  || REGNO (SET_DEST (elt)) != dest_regno + i
7855	  || GET_CODE (SET_SRC (elt)) != MEM
7856	  || GET_MODE (SET_SRC (elt)) != SImode
7857	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7858	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7859	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7860	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7861	return 0;
7862    }
7863
7864  return 1;
7865}
7866
7867/* Similar, but tests for store multiple.  Here, the second vector element
7868   is a CLOBBER.  It will be tested later.  */
7869
7870int
7871store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7872{
7873  int count = XVECLEN (op, 0) - 1;
7874  unsigned int src_regno;
7875  rtx dest_addr;
7876  int i;
7877
7878  /* Perform a quick check so we don't blow up below.  */
7879  if (count <= 1
7880      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7881      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7882      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7883    return 0;
7884
7885  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7886  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7887
7888  for (i = 1; i < count; i++)
7889    {
7890      rtx elt = XVECEXP (op, 0, i + 1);
7891
7892      if (GET_CODE (elt) != SET
7893	  || GET_CODE (SET_SRC (elt)) != REG
7894	  || GET_MODE (SET_SRC (elt)) != SImode
7895	  || REGNO (SET_SRC (elt)) != src_regno + i
7896	  || GET_CODE (SET_DEST (elt)) != MEM
7897	  || GET_MODE (SET_DEST (elt)) != SImode
7898	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7899	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7900	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7901	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7902	return 0;
7903    }
7904
7905  return 1;
7906}
7907
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.

   The complication is that the address register (operands[1]) may be
   one of the destination registers; lswi would then clobber the
   address mid-sequence, so such cases are rewritten to load the
   overlapping register last.  */

const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no multiple-load at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* Address register is the LAST destination: lswi the first
	       words-1 registers, then load the final word (which
	       overwrites the address) separately.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* Address register is the FIRST destination: bump the
	       address past word 0, lswi the remaining words into the
	       following registers, then load word 0 last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* Address register is somewhere in the middle: fall back
	       to individual lwz insns, doing the overlapping word
	       last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a plain lswi does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
7964
7965/* Return 1 for a parallel vrsave operation.  */
7966
7967int
7968vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7969{
7970  int count = XVECLEN (op, 0);
7971  unsigned int dest_regno, src_regno;
7972  int i;
7973
7974  if (count <= 1
7975      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7976      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7977      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7978    return 0;
7979
7980  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7981  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7982
7983  if (dest_regno != VRSAVE_REGNO
7984      && src_regno != VRSAVE_REGNO)
7985    return 0;
7986
7987  for (i = 1; i < count; i++)
7988    {
7989      rtx elt = XVECEXP (op, 0, i);
7990
7991      if (GET_CODE (elt) != CLOBBER
7992	  && GET_CODE (elt) != SET)
7993	return 0;
7994    }
7995
7996  return 1;
7997}
7998
7999/* Return 1 for an PARALLEL suitable for mfcr.  */
8000
8001int
8002mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8003{
8004  int count = XVECLEN (op, 0);
8005  int i;
8006
8007  /* Perform a quick check so we don't blow up below.  */
8008  if (count < 1
8009      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8010      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8011      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8012    return 0;
8013
8014  for (i = 0; i < count; i++)
8015    {
8016      rtx exp = XVECEXP (op, 0, i);
8017      rtx unspec;
8018      int maskval;
8019      rtx src_reg;
8020
8021      src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8022
8023      if (GET_CODE (src_reg) != REG
8024	  || GET_MODE (src_reg) != CCmode
8025	  || ! CR_REGNO_P (REGNO (src_reg)))
8026	return 0;
8027
8028      if (GET_CODE (exp) != SET
8029	  || GET_CODE (SET_DEST (exp)) != REG
8030	  || GET_MODE (SET_DEST (exp)) != SImode
8031	  || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8032	return 0;
8033      unspec = SET_SRC (exp);
8034      maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8035
8036      if (GET_CODE (unspec) != UNSPEC
8037	  || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8038	  || XVECLEN (unspec, 0) != 2
8039	  || XVECEXP (unspec, 0, 0) != src_reg
8040	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8041	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8042	return 0;
8043    }
8044  return 1;
8045}
8046
/* Return 1 for a PARALLEL suitable for mtcrf: every element sets one
   CR field from an UNSPEC_MOVESI_TO_CR whose operands are a single
   shared source GPR and the field mask matching the destination.  */

int
mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The shared source must be a word-mode integer register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element must set one CR field register.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The mtcrf field-mask bit corresponding to this CR field.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* All elements must read the SAME source register (identical
	 rtx, hence pointer comparison) with the right mask.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
8093
/* Return 1 for a PARALLEL suitable for lmw: loads of registers
   DEST_REGNO .. 31 from consecutive word offsets off a single base
   register.  lmw loads through r31, so the element count must be
   exactly 32 - DEST_REGNO.  */

int
lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through r31.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the base address into base register + constant offset.
     A bare (reg) form cannot use r0, which means "literal 0" in the
     instruction's RA field.  */
  if (legitimate_indirect_address_p (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Each element must load the next register in SImode.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      /* Same base register, offsets stepping by 4.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
8169
/* Return 1 for a PARALLEL suitable for stmw: stores of registers
   SRC_REGNO .. 31 to consecutive word offsets off a single base
   register.  Mirror image of lmw_operation above.  */

int
stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores through r31.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the base address into base register + constant offset;
     a bare (reg) form cannot use r0 (literal 0 in the RA field).  */
  if (legitimate_indirect_address_p (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Each element must store the next register in SImode.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      /* Same base register, offsets stepping by 4.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
8245
/* A validation routine: say whether CODE, a condition code, and MODE
   match.  The other alternatives either don't make sense or should
   never be generated.  Aborts (rather than returning) on any invalid
   combination, since reaching one indicates a backend bug.  */

static void
validate_condition_mode (enum rtx_code code, enum machine_mode mode)
{
  /* CODE must be a comparison and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* Unordered-aware comparisons only exist for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
8287
8288/* Return 1 if OP is a comparison operation that is valid for a branch insn.
8289   We only check the opcode against the mode of the CC value here.  */
8290
8291int
8292branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8293{
8294  enum rtx_code code = GET_CODE (op);
8295  enum machine_mode cc_mode;
8296
8297  if (GET_RTX_CLASS (code) != '<')
8298    return 0;
8299
8300  cc_mode = GET_MODE (XEXP (op, 0));
8301  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8302    return 0;
8303
8304  validate_condition_mode (code, cc_mode);
8305
8306  return 1;
8307}
8308
8309/* Return 1 if OP is a comparison operation that is valid for a branch
8310   insn and which is true if the corresponding bit in the CC register
8311   is set.  */
8312
8313int
8314branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8315{
8316  enum rtx_code code;
8317
8318  if (! branch_comparison_operator (op, mode))
8319    return 0;
8320
8321  code = GET_CODE (op);
8322  return (code == EQ || code == LT || code == GT
8323	  || code == LTU || code == GTU
8324	  || code == UNORDERED);
8325}
8326
8327/* Return 1 if OP is a comparison operation that is valid for an scc
8328   insn: it must be a positive comparison.  */
8329
8330int
8331scc_comparison_operator (rtx op, enum machine_mode mode)
8332{
8333  return branch_positive_comparison_operator (op, mode);
8334}
8335
8336int
8337trap_comparison_operator (rtx op, enum machine_mode mode)
8338{
8339  if (mode != VOIDmode && mode != GET_MODE (op))
8340    return 0;
8341  return GET_RTX_CLASS (GET_CODE (op)) == '<';
8342}
8343
8344int
8345boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8346{
8347  enum rtx_code code = GET_CODE (op);
8348  return (code == AND || code == IOR || code == XOR);
8349}
8350
8351int
8352boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8353{
8354  enum rtx_code code = GET_CODE (op);
8355  return (code == IOR || code == XOR);
8356}
8357
8358int
8359min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8360{
8361  enum rtx_code code = GET_CODE (op);
8362  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8363}
8364
8365/* Return 1 if ANDOP is a mask that has no bits on that are not in the
8366   mask required to convert the result of a rotate insn into a shift
8367   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
8368
8369int
8370includes_lshift_p (rtx shiftop, rtx andop)
8371{
8372  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8373
8374  shift_mask <<= INTVAL (shiftop);
8375
8376  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8377}
8378
8379/* Similar, but for right shift.  */
8380
8381int
8382includes_rshift_p (rtx shiftop, rtx andop)
8383{
8384  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8385
8386  shift_mask >>= INTVAL (shiftop);
8387
8388  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8389}
8390
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.

   Handles both CONST_INT and (for 32-bit hosts, where a 64-bit value
   needs two words) CONST_DOUBLE forms of ANDOP.  */

int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 32-bit host: the 64-bit mask is split into HIGH:LOW words.
	 HIGH is only read on <64-bit-HWI hosts; every later use is
	 guarded accordingly.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* All-zeros and all-ones masks are rejected, as in the
	 CONST_INT case.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones lives entirely in the high word, so the
	     shift amount must be at least 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Same transition check as the CONST_INT case, carried across
	 the word boundary.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
8483
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.

   As above, ANDOP may be a CONST_INT or (on 32-bit hosts) a
   CONST_DOUBLE holding the 64-bit value in two words.  */

int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The ones live entirely in the high word; redo the
		 CONST_INT logic on that word with the shift amount
		 reduced by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* With a nonzero low word, the high word must be all ones
	     for the mask to be 0...01...1.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
8553
8554/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8555   for lfq and stfq insns.
8556
8557   Note reg1 and reg2 *must* be hard registers.  To be sure we will
8558   abort if we are passed pseudo registers.  */
8559
8560int
8561registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8562{
8563  /* We might have been passed a SUBREG.  */
8564  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8565    return 0;
8566
8567  return (REGNO (reg1) == REGNO (reg2) - 1);
8568}
8569
/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
   addr1 and addr2 must be in consecutive memory locations
   (addr2 == addr1 + 8).  */

int
addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
{
  unsigned int reg1;
  int offset1;

  /* Extract an offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)
	return 0;
      else
	{
          reg1 = REGNO (XEXP (addr1, 0));
	  /* The offset must be constant!  */
	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
            return 0;
          offset1 = INTVAL (XEXP (addr1, 1));
	}
    }
  else if (GET_CODE (addr1) != REG)
    return 0;
  else
    {
      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset1 = 0;
    }

  /* Make sure the second address is a (mem (plus (reg) (const_int)))
      or if it is (mem (reg)) then make sure that offset1 is -8 and the same
      register as addr1.  */
  if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
   return 1;
  if (GET_CODE (addr2) != PLUS)
    return 0;

  if (GET_CODE (XEXP (addr2, 0)) != REG
      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
    return 0;

  /* Both addresses must use the same base register.  */
  if (reg1 != REGNO (XEXP (addr2, 0)))
    return 0;

  /* The offset for the second addr must be 8 more than the first addr.  */
  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
    return 0;

  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
     instructions.  */
  return 1;
}
8627
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  INP is nonzero if we are loading the reg, zero
   for storing.  */

enum reg_class
secondary_reload_class (enum reg_class class,
			enum machine_mode mode,
			rtx in,
			int inp)
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
                    && MACHOPIC_INDIRECT
#endif
                    ))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* A 64-bit gpr load or store using an offset that isn't a multiple of
     four needs a secondary reload.  */
  if (TARGET_POWERPC64
      && GET_MODE_UNIT_SIZE (mode) >= 8
      && (!inp || class != BASE_REGS)
      && invalid_gpr_mem (in, mode))
    return BASE_REGS;

  /* Resolve IN to a hard register number, or -1 for "not a register"
     (constants, memory, or pseudos without a hard reg).  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
8713
8714/* Given a comparison operation, return the bit number in CCR to test.  We
8715   know this is a valid comparison.
8716
8717   SCC_P is 1 if this is for an scc.  That means that %D will have been
8718   used instead of %C, so the bits will be in different places.
8719
8720   Return -1 if OP isn't a valid comparison for some reason.  */
8721
8722int
8723ccr_bit (rtx op, int scc_p)
8724{
8725  enum rtx_code code = GET_CODE (op);
8726  enum machine_mode cc_mode;
8727  int cc_regnum;
8728  int base_bit;
8729  rtx reg;
8730
8731  if (GET_RTX_CLASS (code) != '<')
8732    return -1;
8733
8734  reg = XEXP (op, 0);
8735
8736  if (GET_CODE (reg) != REG
8737      || ! CR_REGNO_P (REGNO (reg)))
8738    abort ();
8739
8740  cc_mode = GET_MODE (reg);
8741  cc_regnum = REGNO (reg);
8742  base_bit = 4 * (cc_regnum - CR0_REGNO);
8743
8744  validate_condition_mode (code, cc_mode);
8745
8746  /* When generating a sCOND operation, only positive conditions are
8747     allowed.  */
8748  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8749      && code != GTU && code != LTU)
8750    abort ();
8751
8752  switch (code)
8753    {
8754    case NE:
8755      return scc_p ? base_bit + 3 : base_bit + 2;
8756    case EQ:
8757      return base_bit + 2;
8758    case GT:  case GTU:  case UNLE:
8759      return base_bit + 1;
8760    case LT:  case LTU:  case UNGE:
8761      return base_bit;
8762    case ORDERED:  case UNORDERED:
8763      return base_bit + 3;
8764
8765    case GE:  case GEU:
8766      /* If scc, we will have done a cror to put the bit in the
8767	 unordered position.  So test that bit.  For integer, this is ! LT
8768	 unless this is an scc insn.  */
8769      return scc_p ? base_bit + 3 : base_bit;
8770
8771    case LE:  case LEU:
8772      return scc_p ? base_bit + 3 : base_bit + 1;
8773
8774    default:
8775      abort ();
8776    }
8777}
8778
8779/* Return the GOT register.  */
8780
8781struct rtx_def *
8782rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8783{
8784  /* The second flow pass currently (June 1999) can't update
8785     regs_ever_live without disturbing other parts of the compiler, so
8786     update it here to make the prolog/epilogue code happy.  */
8787  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8788    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8789
8790  current_function_uses_pic_offset_table = 1;
8791
8792  return pic_offset_table_rtx;
8793}
8794
8795/* Function to init struct machine_function.
8796   This will be called, via a pointer variable,
8797   from push_function_context.  */
8798
8799static struct machine_function *
8800rs6000_init_machine_status (void)
8801{
8802  return ggc_alloc_cleared (sizeof (machine_function));
8803}
8804
/* These macros test for integers and extract the low-order bits.
   INT_P accepts a CONST_INT, or a CONST_DOUBLE carrying an integer
   value (VOIDmode); INT_LOWPART yields the low-order HOST_WIDE_INT
   of either form.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8812
8813int
8814extract_MB (rtx op)
8815{
8816  int i;
8817  unsigned long val = INT_LOWPART (op);
8818
8819  /* If the high bit is zero, the value is the first 1 bit we find
8820     from the left.  */
8821  if ((val & 0x80000000) == 0)
8822    {
8823      if ((val & 0xffffffff) == 0)
8824	abort ();
8825
8826      i = 1;
8827      while (((val <<= 1) & 0x80000000) == 0)
8828	++i;
8829      return i;
8830    }
8831
8832  /* If the high bit is set and the low bit is not, or the mask is all
8833     1's, the value is zero.  */
8834  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8835    return 0;
8836
8837  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
8838     from the right.  */
8839  i = 31;
8840  while (((val >>= 1) & 1) != 0)
8841    --i;
8842
8843  return i;
8844}
8845
8846int
8847extract_ME (rtx op)
8848{
8849  int i;
8850  unsigned long val = INT_LOWPART (op);
8851
8852  /* If the low bit is zero, the value is the first 1 bit we find from
8853     the right.  */
8854  if ((val & 1) == 0)
8855    {
8856      if ((val & 0xffffffff) == 0)
8857	abort ();
8858
8859      i = 30;
8860      while (((val >>= 1) & 1) == 0)
8861	--i;
8862
8863      return i;
8864    }
8865
8866  /* If the low bit is set and the high bit is not, or the mask is all
8867     1's, the value is 31.  */
8868  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8869    return 31;
8870
8871  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
8872     from the left.  */
8873  i = 0;
8874  while (((val <<= 1) & 0x80000000) != 0)
8875    ++i;
8876
8877  return i;
8878}
8879
8880/* Locate some local-dynamic symbol still in use by this function
8881   so that we can print its name in some tls_ld pattern.  */
8882
8883static const char *
8884rs6000_get_some_local_dynamic_name (void)
8885{
8886  rtx insn;
8887
8888  if (cfun->machine->some_ld_name)
8889    return cfun->machine->some_ld_name;
8890
8891  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8892    if (INSN_P (insn)
8893	&& for_each_rtx (&PATTERN (insn),
8894			 rs6000_get_some_local_dynamic_name_1, 0))
8895      return cfun->machine->some_ld_name;
8896
8897  abort ();
8898}
8899
8900/* Helper function for rs6000_get_some_local_dynamic_name.  */
8901
8902static int
8903rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8904{
8905  rtx x = *px;
8906
8907  if (GET_CODE (x) == SYMBOL_REF)
8908    {
8909      const char *str = XSTR (x, 0);
8910      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8911	{
8912	  cfun->machine->some_ld_name = str;
8913	  return 1;
8914	}
8915    }
8916
8917  return 0;
8918}
8919
/* Print an operand.  Recognize special options, documented below.  */

/* Relocation name and base register used when annotating small-data
   references.  For ELF these depend on the -msdata selection (EABI
   uses sda21 with r0, otherwise sdarel with r13); elsewhere the EABI
   form is used unconditionally.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
8929
8930void
8931print_operand (FILE *file, rtx x, int code)
8932{
8933  int i;
8934  HOST_WIDE_INT val;
8935  unsigned HOST_WIDE_INT uval;
8936
8937  switch (code)
8938    {
8939    case '.':
8940      /* Write out an instruction after the call which may be replaced
8941	 with glue code by the loader.  This depends on the AIX version.  */
8942      asm_fprintf (file, RS6000_CALL_GLUE);
8943      return;
8944
8945      /* %a is output_address.  */
8946
8947    case 'A':
8948      /* If X is a constant integer whose low-order 5 bits are zero,
8949	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
8950	 in the AIX assembler where "sri" with a zero shift count
8951	 writes a trash instruction.  */
8952      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8953	putc ('l', file);
8954      else
8955	putc ('r', file);
8956      return;
8957
8958    case 'b':
8959      /* If constant, low-order 16 bits of constant, unsigned.
8960	 Otherwise, write normally.  */
8961      if (INT_P (x))
8962	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8963      else
8964	print_operand (file, x, 0);
8965      return;
8966
8967    case 'B':
8968      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8969	 for 64-bit mask direction.  */
8970      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8971      return;
8972
8973      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8974	 output_operand.  */
8975
8976    case 'c':
8977      /* X is a CR register.  Print the number of the GT bit of the CR.  */
8978      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8979       output_operand_lossage ("invalid %%E value");
8980      else
8981       fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
8982      return;
8983
8984    case 'D':
8985      /* Like 'J' but get to the GT bit.  */
8986      if (GET_CODE (x) != REG)
8987       abort ();
8988
8989      /* Bit 1 is GT bit.  */
8990      i = 4 * (REGNO (x) - CR0_REGNO) + 1;
8991
8992      /* If we want bit 31, write a shift count of zero, not 32.  */
8993      fprintf (file, "%d", i == 31 ? 0 : i + 1);
8994      return;
8995
8996    case 'E':
8997      /* X is a CR register.  Print the number of the EQ bit of the CR */
8998      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8999	output_operand_lossage ("invalid %%E value");
9000      else
9001	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9002      return;
9003
9004    case 'f':
9005      /* X is a CR register.  Print the shift count needed to move it
9006	 to the high-order four bits.  */
9007      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9008	output_operand_lossage ("invalid %%f value");
9009      else
9010	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9011      return;
9012
9013    case 'F':
9014      /* Similar, but print the count for the rotate in the opposite
9015	 direction.  */
9016      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9017	output_operand_lossage ("invalid %%F value");
9018      else
9019	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9020      return;
9021
9022    case 'G':
9023      /* X is a constant integer.  If it is negative, print "m",
9024	 otherwise print "z".  This is to make an aze or ame insn.  */
9025      if (GET_CODE (x) != CONST_INT)
9026	output_operand_lossage ("invalid %%G value");
9027      else if (INTVAL (x) >= 0)
9028	putc ('z', file);
9029      else
9030	putc ('m', file);
9031      return;
9032
9033    case 'h':
9034      /* If constant, output low-order five bits.  Otherwise, write
9035	 normally.  */
9036      if (INT_P (x))
9037	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9038      else
9039	print_operand (file, x, 0);
9040      return;
9041
9042    case 'H':
9043      /* If constant, output low-order six bits.  Otherwise, write
9044	 normally.  */
9045      if (INT_P (x))
9046	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9047      else
9048	print_operand (file, x, 0);
9049      return;
9050
9051    case 'I':
9052      /* Print `i' if this is a constant, else nothing.  */
9053      if (INT_P (x))
9054	putc ('i', file);
9055      return;
9056
9057    case 'j':
9058      /* Write the bit number in CCR for jump.  */
9059      i = ccr_bit (x, 0);
9060      if (i == -1)
9061	output_operand_lossage ("invalid %%j code");
9062      else
9063	fprintf (file, "%d", i);
9064      return;
9065
9066    case 'J':
9067      /* Similar, but add one for shift count in rlinm for scc and pass
9068	 scc flag to `ccr_bit'.  */
9069      i = ccr_bit (x, 1);
9070      if (i == -1)
9071	output_operand_lossage ("invalid %%J code");
9072      else
9073	/* If we want bit 31, write a shift count of zero, not 32.  */
9074	fprintf (file, "%d", i == 31 ? 0 : i + 1);
9075      return;
9076
9077    case 'k':
9078      /* X must be a constant.  Write the 1's complement of the
9079	 constant.  */
9080      if (! INT_P (x))
9081	output_operand_lossage ("invalid %%k value");
9082      else
9083	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9084      return;
9085
9086    case 'K':
9087      /* X must be a symbolic constant on ELF.  Write an
9088	 expression suitable for an 'addi' that adds in the low 16
9089	 bits of the MEM.  */
9090      if (GET_CODE (x) != CONST)
9091	{
9092	  print_operand_address (file, x);
9093	  fputs ("@l", file);
9094	}
9095      else
9096	{
9097	  if (GET_CODE (XEXP (x, 0)) != PLUS
9098	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9099		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9100	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9101	    output_operand_lossage ("invalid %%K value");
9102	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
9103	  fputs ("@l", file);
9104	  /* For GNU as, there must be a non-alphanumeric character
9105	     between 'l' and the number.  The '-' is added by
9106	     print_operand() already.  */
9107	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9108	    fputs ("+", file);
9109	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9110	}
9111      return;
9112
9113      /* %l is output_asm_label.  */
9114
9115    case 'L':
9116      /* Write second word of DImode or DFmode reference.  Works on register
9117	 or non-indexed memory only.  */
9118      if (GET_CODE (x) == REG)
9119	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9120      else if (GET_CODE (x) == MEM)
9121	{
9122	  /* Handle possible auto-increment.  Since it is pre-increment and
9123	     we have already done it, we can just use an offset of word.  */
9124	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9125	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9126	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9127					   UNITS_PER_WORD));
9128	  else
9129	    output_address (XEXP (adjust_address_nv (x, SImode,
9130						     UNITS_PER_WORD),
9131				  0));
9132
9133	  if (small_data_operand (x, GET_MODE (x)))
9134	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9135		     reg_names[SMALL_DATA_REG]);
9136	}
9137      return;
9138
9139    case 'm':
9140      /* MB value for a mask operand.  */
9141      if (! mask_operand (x, SImode))
9142	output_operand_lossage ("invalid %%m value");
9143
9144      fprintf (file, "%d", extract_MB (x));
9145      return;
9146
9147    case 'M':
9148      /* ME value for a mask operand.  */
9149      if (! mask_operand (x, SImode))
9150	output_operand_lossage ("invalid %%M value");
9151
9152      fprintf (file, "%d", extract_ME (x));
9153      return;
9154
9155      /* %n outputs the negative of its operand.  */
9156
9157    case 'N':
9158      /* Write the number of elements in the vector times 4.  */
9159      if (GET_CODE (x) != PARALLEL)
9160	output_operand_lossage ("invalid %%N value");
9161      else
9162	fprintf (file, "%d", XVECLEN (x, 0) * 4);
9163      return;
9164
9165    case 'O':
9166      /* Similar, but subtract 1 first.  */
9167      if (GET_CODE (x) != PARALLEL)
9168	output_operand_lossage ("invalid %%O value");
9169      else
9170	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9171      return;
9172
9173    case 'p':
9174      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
9175      if (! INT_P (x)
9176	  || INT_LOWPART (x) < 0
9177	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
9178	output_operand_lossage ("invalid %%p value");
9179      else
9180	fprintf (file, "%d", i);
9181      return;
9182
9183    case 'P':
9184      /* The operand must be an indirect memory reference.  The result
9185	 is the register number.  */
9186      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9187	  || REGNO (XEXP (x, 0)) >= 32)
9188	output_operand_lossage ("invalid %%P value");
9189      else
9190	fprintf (file, "%d", REGNO (XEXP (x, 0)));
9191      return;
9192
9193    case 'q':
9194      /* This outputs the logical code corresponding to a boolean
9195	 expression.  The expression may have one or both operands
9196	 negated (if one, only the first one).  For condition register
9197         logical operations, it will also treat the negated
9198         CR codes as NOTs, but not handle NOTs of them.  */
9199      {
9200	const char *const *t = 0;
9201	const char *s;
9202	enum rtx_code code = GET_CODE (x);
9203	static const char * const tbl[3][3] = {
9204	  { "and", "andc", "nor" },
9205	  { "or", "orc", "nand" },
9206	  { "xor", "eqv", "xor" } };
9207
9208	if (code == AND)
9209	  t = tbl[0];
9210	else if (code == IOR)
9211	  t = tbl[1];
9212	else if (code == XOR)
9213	  t = tbl[2];
9214	else
9215	  output_operand_lossage ("invalid %%q value");
9216
9217	if (GET_CODE (XEXP (x, 0)) != NOT)
9218	  s = t[0];
9219	else
9220	  {
9221	    if (GET_CODE (XEXP (x, 1)) == NOT)
9222	      s = t[2];
9223	    else
9224	      s = t[1];
9225	  }
9226
9227	fputs (s, file);
9228      }
9229      return;
9230
9231    case 'Q':
9232      if (TARGET_MFCRF)
9233	fputc (',',file);
9234        /* FALLTHRU */
9235      else
9236	return;
9237
9238    case 'R':
9239      /* X is a CR register.  Print the mask for `mtcrf'.  */
9240      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9241	output_operand_lossage ("invalid %%R value");
9242      else
9243	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9244      return;
9245
9246    case 's':
9247      /* Low 5 bits of 32 - value */
9248      if (! INT_P (x))
9249	output_operand_lossage ("invalid %%s value");
9250      else
9251	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9252      return;
9253
9254    case 'S':
9255      /* PowerPC64 mask position.  All 0's is excluded.
9256	 CONST_INT 32-bit mask is considered sign-extended so any
9257	 transition must occur within the CONST_INT, not on the boundary.  */
9258      if (! mask64_operand (x, DImode))
9259	output_operand_lossage ("invalid %%S value");
9260
9261      uval = INT_LOWPART (x);
9262
9263      if (uval & 1)	/* Clear Left */
9264	{
9265#if HOST_BITS_PER_WIDE_INT > 64
9266	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9267#endif
9268	  i = 64;
9269	}
9270      else		/* Clear Right */
9271	{
9272	  uval = ~uval;
9273#if HOST_BITS_PER_WIDE_INT > 64
9274	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9275#endif
9276	  i = 63;
9277	}
9278      while (uval != 0)
9279	--i, uval >>= 1;
9280      if (i < 0)
9281	abort ();
9282      fprintf (file, "%d", i);
9283      return;
9284
9285    case 't':
9286      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
9287      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9288	abort ();
9289
9290      /* Bit 3 is OV bit.  */
9291      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9292
9293      /* If we want bit 31, write a shift count of zero, not 32.  */
9294      fprintf (file, "%d", i == 31 ? 0 : i + 1);
9295      return;
9296
9297    case 'T':
9298      /* Print the symbolic name of a branch target register.  */
9299      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9300				  && REGNO (x) != COUNT_REGISTER_REGNUM))
9301	output_operand_lossage ("invalid %%T value");
9302      else if (REGNO (x) == LINK_REGISTER_REGNUM)
9303	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9304      else
9305	fputs ("ctr", file);
9306      return;
9307
9308    case 'u':
9309      /* High-order 16 bits of constant for use in unsigned operand.  */
9310      if (! INT_P (x))
9311	output_operand_lossage ("invalid %%u value");
9312      else
9313	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9314		 (INT_LOWPART (x) >> 16) & 0xffff);
9315      return;
9316
9317    case 'v':
9318      /* High-order 16 bits of constant for use in signed operand.  */
9319      if (! INT_P (x))
9320	output_operand_lossage ("invalid %%v value");
9321      else
9322	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9323		 (INT_LOWPART (x) >> 16) & 0xffff);
9324      return;
9325
9326    case 'U':
9327      /* Print `u' if this has an auto-increment or auto-decrement.  */
9328      if (GET_CODE (x) == MEM
9329	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
9330	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9331	putc ('u', file);
9332      return;
9333
9334    case 'V':
9335      /* Print the trap code for this operand.  */
9336      switch (GET_CODE (x))
9337	{
9338	case EQ:
9339	  fputs ("eq", file);   /* 4 */
9340	  break;
9341	case NE:
9342	  fputs ("ne", file);   /* 24 */
9343	  break;
9344	case LT:
9345	  fputs ("lt", file);   /* 16 */
9346	  break;
9347	case LE:
9348	  fputs ("le", file);   /* 20 */
9349	  break;
9350	case GT:
9351	  fputs ("gt", file);   /* 8 */
9352	  break;
9353	case GE:
9354	  fputs ("ge", file);   /* 12 */
9355	  break;
9356	case LTU:
9357	  fputs ("llt", file);  /* 2 */
9358	  break;
9359	case LEU:
9360	  fputs ("lle", file);  /* 6 */
9361	  break;
9362	case GTU:
9363	  fputs ("lgt", file);  /* 1 */
9364	  break;
9365	case GEU:
9366	  fputs ("lge", file);  /* 5 */
9367	  break;
9368	default:
9369	  abort ();
9370	}
9371      break;
9372
9373    case 'w':
9374      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
9375	 normally.  */
9376      if (INT_P (x))
9377	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9378		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9379      else
9380	print_operand (file, x, 0);
9381      return;
9382
9383    case 'W':
9384      /* MB value for a PowerPC64 rldic operand.  */
9385      val = (GET_CODE (x) == CONST_INT
9386	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9387
9388      if (val < 0)
9389	i = -1;
9390      else
9391	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9392	  if ((val <<= 1) < 0)
9393	    break;
9394
9395#if HOST_BITS_PER_WIDE_INT == 32
9396      if (GET_CODE (x) == CONST_INT && i >= 0)
9397	i += 32;  /* zero-extend high-part was all 0's */
9398      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9399	{
9400	  val = CONST_DOUBLE_LOW (x);
9401
9402	  if (val == 0)
9403	    abort ();
9404	  else if (val < 0)
9405	    --i;
9406	  else
9407	    for ( ; i < 64; i++)
9408	      if ((val <<= 1) < 0)
9409		break;
9410	}
9411#endif
9412
9413      fprintf (file, "%d", i + 1);
9414      return;
9415
9416    case 'X':
9417      if (GET_CODE (x) == MEM
9418	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
9419	putc ('x', file);
9420      return;
9421
9422    case 'Y':
9423      /* Like 'L', for third word of TImode  */
9424      if (GET_CODE (x) == REG)
9425	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9426      else if (GET_CODE (x) == MEM)
9427	{
9428	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9429	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9430	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9431	  else
9432	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9433	  if (small_data_operand (x, GET_MODE (x)))
9434	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9435		     reg_names[SMALL_DATA_REG]);
9436	}
9437      return;
9438
9439    case 'z':
9440      /* X is a SYMBOL_REF.  Write out the name preceded by a
9441	 period and without any trailing data in brackets.  Used for function
9442	 names.  If we are configured for System V (or the embedded ABI) on
9443	 the PowerPC, do not emit the period, since those systems do not use
9444	 TOCs and the like.  */
9445      if (GET_CODE (x) != SYMBOL_REF)
9446	abort ();
9447
9448      if (XSTR (x, 0)[0] != '.')
9449	{
9450	  switch (DEFAULT_ABI)
9451	    {
9452	    default:
9453	      abort ();
9454
9455	    case ABI_AIX:
9456	      putc ('.', file);
9457	      break;
9458
9459	    case ABI_V4:
9460	    case ABI_DARWIN:
9461	      break;
9462	    }
9463	}
9464      if (TARGET_AIX)
9465	RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9466      else
9467	assemble_name (file, XSTR (x, 0));
9468      return;
9469
9470    case 'Z':
9471      /* Like 'L', for last word of TImode.  */
9472      if (GET_CODE (x) == REG)
9473	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9474      else if (GET_CODE (x) == MEM)
9475	{
9476	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9477	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9478	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9479	  else
9480	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9481	  if (small_data_operand (x, GET_MODE (x)))
9482	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9483		     reg_names[SMALL_DATA_REG]);
9484	}
9485      return;
9486
9487      /* Print AltiVec or SPE memory operand.  */
9488    case 'y':
9489      {
9490	rtx tmp;
9491
9492	if (GET_CODE (x) != MEM)
9493	  abort ();
9494
9495	tmp = XEXP (x, 0);
9496
9497	if (TARGET_E500)
9498	  {
9499	    /* Handle [reg].  */
9500	    if (GET_CODE (tmp) == REG)
9501	      {
9502		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9503		break;
9504	      }
9505	    /* Handle [reg+UIMM].  */
9506	    else if (GET_CODE (tmp) == PLUS &&
9507		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9508	      {
9509		int x;
9510
9511		if (GET_CODE (XEXP (tmp, 0)) != REG)
9512		  abort ();
9513
9514		x = INTVAL (XEXP (tmp, 1));
9515		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9516		break;
9517	      }
9518
9519	    /* Fall through.  Must be [reg+reg].  */
9520	  }
9521	if (GET_CODE (tmp) == REG)
9522	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9523	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9524	  {
9525	    if (REGNO (XEXP (tmp, 0)) == 0)
9526	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9527		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
9528	    else
9529	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9530		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
9531	  }
9532	else
9533	  abort ();
9534	break;
9535      }
9536
9537    case 0:
9538      if (GET_CODE (x) == REG)
9539	fprintf (file, "%s", reg_names[REGNO (x)]);
9540      else if (GET_CODE (x) == MEM)
9541	{
9542	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
9543	     know the width from the mode.  */
9544	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9545	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9546		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9547	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9548	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9549		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9550	  else
9551	    output_address (XEXP (x, 0));
9552	}
9553      else
9554	output_addr_const (file, x);
9555      return;
9556
9557    case '&':
9558      assemble_name (file, rs6000_get_some_local_dynamic_name ());
9559      return;
9560
9561    default:
9562      output_operand_lossage ("invalid %%xn code");
9563    }
9564}
9565
9566/* Print the address of an operand.  */
9567
/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  /* Plain register: "0(rN)".  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      /* Symbolic address; small-data references get the sdata
	 relocation suffix and base register appended.  */
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* A bare symbolic address should never survive to here when
	 a TOC is in use.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form: "rA,rB".  Register 0 reads as literal zero in
	 the RA slot, so if it appears first swap the operand order.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  /* Register plus constant displacement: "D(rN)".  */
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  /* LO_SUM: low 16 bits of a symbol, ELF "@l" syntax.  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  /* Same as above in Darwin "lo16(...)" syntax.  */
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* Temporarily rename the symbol to NAME@toc; the original
		 name is restored below after printing.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary (minus ...) -> (sym) rewrite.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
9651
9652/* Target hook for assembling integer objects.  The PowerPC version has
9653   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9654   is defined.  It also needs to handle DI-mode objects on 64-bit
9655   targets.  */
9656
static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      /* Guards against recursing when output_addr_const below ends up
	 re-entering the integer-assembly path.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label before the word, output the word itself
	     with an @fixup annotation, then record the label's address
	     in the .fixup section for the startup relocation code.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic treatment.  */
  return default_assemble_integer (x, size, aligned_p);
}
9712
9713#ifdef HAVE_GAS_HIDDEN
9714/* Emit an assembler directive to set symbol visibility for DECL to
9715   VISIBILITY_TYPE.  */
9716
static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by VIS; entry 0 is NULL on the assumption that the
	 default-visibility case never reaches this hook -- TODO
	 confirm callers never pass the default value here.  */
      static const char * const visibility_types[] = {
        NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Emit the directive for both NAME and its ".NAME"
	 entry-point counterpart.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
9740#endif
9741
9742enum rtx_code
9743rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9744{
9745  /* Reversal of FP compares takes care -- an ordered compare
9746     becomes an unordered compare and vice versa.  */
9747  if (mode == CCFPmode
9748      && (!flag_finite_math_only
9749	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9750	  || code == UNEQ || code == LTGT))
9751    return reverse_condition_maybe_unordered (code);
9752  else
9753    return reverse_condition (code);
9754}
9755
9756/* Generate a compare for CODE.  Return a brand-new rtx that
9757   represents the result of the compare.  */
9758
9759static rtx
9760rs6000_generate_compare (enum rtx_code code)
9761{
9762  enum machine_mode comp_mode;
9763  rtx compare_result;
9764
9765  if (rs6000_compare_fp_p)
9766    comp_mode = CCFPmode;
9767  else if (code == GTU || code == LTU
9768	  || code == GEU || code == LEU)
9769    comp_mode = CCUNSmode;
9770  else
9771    comp_mode = CCmode;
9772
9773  /* First, the compare.  */
9774  compare_result = gen_reg_rtx (comp_mode);
9775
9776  /* SPE FP compare instructions on the GPRs.  Yuck!  */
9777  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9778      && rs6000_compare_fp_p)
9779    {
9780      rtx cmp, or1, or2, or_result, compare_result2;
9781
9782      /* Note: The E500 comparison instructions set the GT bit (x +
9783        1), on success.  This explains the mess.  */
9784
9785      switch (code)
9786	{
9787       case EQ: case UNEQ: case NE: case LTGT:
9788	  cmp = flag_finite_math_only
9789	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9790			       rs6000_compare_op1)
9791	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9792			       rs6000_compare_op1);
9793	  break;
9794       case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
9795	  cmp = flag_finite_math_only
9796	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9797			       rs6000_compare_op1)
9798	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9799			       rs6000_compare_op1);
9800	  break;
9801       case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
9802	  cmp = flag_finite_math_only
9803	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9804			       rs6000_compare_op1)
9805	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9806			       rs6000_compare_op1);
9807	  break;
9808	default:
9809	  abort ();
9810	}
9811
9812      /* Synthesize LE and GE from LT/GT || EQ.  */
9813      if (code == LE || code == GE || code == LEU || code == GEU)
9814	{
9815	  emit_insn (cmp);
9816
9817	  switch (code)
9818	    {
9819	    case LE: code = LT; break;
9820	    case GE: code = GT; break;
9821	    case LEU: code = LT; break;
9822	    case GEU: code = GT; break;
9823	    default: abort ();
9824	    }
9825
9826	  or1 = gen_reg_rtx (SImode);
9827	  or2 = gen_reg_rtx (SImode);
9828	  or_result = gen_reg_rtx (CCEQmode);
9829	  compare_result2 = gen_reg_rtx (CCFPmode);
9830
9831	  /* Do the EQ.  */
9832	  cmp = flag_finite_math_only
9833	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9834			       rs6000_compare_op1)
9835	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9836			       rs6000_compare_op1);
9837	  emit_insn (cmp);
9838
9839	  or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
9840	  or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
9841
9842	  /* OR them together.  */
9843	  cmp = gen_rtx_SET (VOIDmode, or_result,
9844			     gen_rtx_COMPARE (CCEQmode,
9845					      gen_rtx_IOR (SImode, or1, or2),
9846					      const_true_rtx));
9847	  compare_result = or_result;
9848	  code = EQ;
9849	}
9850      else
9851	{
9852	  if (code == NE || code == LTGT)
9853	    code = NE;
9854         else
9855           code = EQ;
9856	}
9857
9858      emit_insn (cmp);
9859    }
9860  else
9861    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9862			    gen_rtx_COMPARE (comp_mode,
9863					     rs6000_compare_op0,
9864					     rs6000_compare_op1)));
9865
9866  /* Some kinds of FP comparisons need an OR operation;
9867     under flag_finite_math_only we don't bother.  */
9868  if (rs6000_compare_fp_p
9869      && ! flag_finite_math_only
9870      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9871      && (code == LE || code == GE
9872	  || code == UNEQ || code == LTGT
9873	  || code == UNGT || code == UNLT))
9874    {
9875      enum rtx_code or1, or2;
9876      rtx or1_rtx, or2_rtx, compare2_rtx;
9877      rtx or_result = gen_reg_rtx (CCEQmode);
9878
9879      switch (code)
9880	{
9881	case LE: or1 = LT;  or2 = EQ;  break;
9882	case GE: or1 = GT;  or2 = EQ;  break;
9883	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
9884	case LTGT: or1 = LT;  or2 = GT;  break;
9885	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
9886	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
9887	default:  abort ();
9888	}
9889      validate_condition_mode (or1, comp_mode);
9890      validate_condition_mode (or2, comp_mode);
9891      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9892      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9893      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9894				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9895				      const_true_rtx);
9896      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9897
9898      compare_result = or_result;
9899      code = EQ;
9900    }
9901
9902  validate_condition_mode (code, GET_MODE (compare_result));
9903
9904  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9905}
9906
9907
9908/* Emit the RTL for an sCOND pattern.  */
9909
void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  /* Emit the compare; CONDITION_RTX is (cond_code cc-reg 0).  */
  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      /* SPE FP: rs6000_generate_compare reduces everything to an
	 EQ/NE test of the GT bit in the CR field.  */
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      if (cond_code != NE && cond_code != EQ)
       abort ();

      /* For NE, invert the GT bit before extracting it.  */
      if (cond_code == NE)
       emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  /* Conditions with no single-instruction mfcr/rlwinm extraction:
     compute the reversed condition and test that for equality with
     zero instead.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
			      SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  /* Determine the mode the comparison was done in (op0 may be a
     VOIDmode constant, in which case op1 carries the mode).  */
  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      /* 64-bit: materialize in DImode and narrow into RESULT.  */
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
9971
9972/* Emit a branch of kind CODE to location LOC.  */
9973
9974void
9975rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9976{
9977  rtx condition_rtx, loc_ref;
9978
9979  condition_rtx = rs6000_generate_compare (code);
9980  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9981  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9982			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9983						     loc_ref, pc_rtx)));
9984}
9985
9986/* Return the string to output a conditional branch to LABEL, which is
9987   the operand number of the label, or -1 if the branch is really a
9988   conditional return.
9989
9990   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
9991   condition code register and its mode specifies what kind of
9992   comparison we made.
9993
9994   REVERSED is nonzero if we should reverse the sense of the comparison.
9995
9996   INSN is the insn.  */
9997
char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  /* NOTE: returns a pointer to a static buffer, so the result must be
     consumed before the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Insn length 8 means the target is out of direct conditional-branch
     range; we branch around an unconditional jump instead.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = GT;
      else if (code == NE)
       code = UNLE;
      else
	abort ();
    }

  /* Map the rtx comparison code to the branch mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (rs6000_always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* A long branch inverts the condition, so the hint must be
	     inverted too -- hence the XOR with need_longbranch.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
              pred = "+";
	  else
	    pred = "-";
	}
    }

  /* LABEL == NULL means a conditional return ("b<cc>lr").  The {x|y}
     syntax selects between old mnemonics and PowerPC mnemonics.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
10114
10115/* Return the string to flip the GT bit on a CR.  */
10116char *
10117output_e500_flip_gt_bit (rtx dst, rtx src)
10118{
10119  static char string[64];
10120  int a, b;
10121
10122  if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10123      || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10124    abort ();
10125
10126  /* GT bit.  */
10127  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10128  b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10129
10130  sprintf (string, "crnot %d,%d", a, b);
10131  return string;
10132}
10133
10134/* Emit a conditional move: move TRUE_COND to DEST if OP of the
10135   operands of the last comparison is nonzero/true, FALSE_COND if it
10136   is zero/false.  Return 0 if the hardware has no such operation.  */
10137
int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      /* Integer conditional move is only available via isel.  */
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
	   && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	/* a > 0 is the complement of a <= 0 when NaNs are excluded,
	   so swap the arms and use the cheaper LE.  */
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a <= 0  <->  -a >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* ORDERED(a)  <->  |a| >= 0 (false only for NaN).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a == 0  <->  -|a| >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* Emit the final fsel-style conditional move on op0 >= 0.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
10303
10304/* Same as above, but for ints (isel).  */
10305
10306static int
10307rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10308{
10309  rtx condition_rtx, cr;
10310
10311  /* All isel implementations thus far are 32-bits.  */
10312  if (GET_MODE (rs6000_compare_op0) != SImode)
10313    return 0;
10314
10315  /* We still have to do the compare, because isel doesn't do a
10316     compare, it just looks at the CRx bits set by a previous compare
10317     instruction.  */
10318  condition_rtx = rs6000_generate_compare (GET_CODE (op));
10319  cr = XEXP (condition_rtx, 0);
10320
10321  if (GET_MODE (cr) == CCmode)
10322    emit_insn (gen_isel_signed (dest, condition_rtx,
10323				true_cond, false_cond, cr));
10324  else
10325    emit_insn (gen_isel_unsigned (dest, condition_rtx,
10326				  true_cond, false_cond, cr));
10327
10328  return 1;
10329}
10330
10331const char *
10332output_isel (rtx *operands)
10333{
10334  enum rtx_code code;
10335
10336  code = GET_CODE (operands[1]);
10337  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10338    {
10339      PUT_CODE (operands[1], reverse_condition (code));
10340      return "isel %0,%3,%2,%j1";
10341    }
10342  else
10343    return "isel %0,%2,%3,%j1";
10344}
10345
10346void
10347rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10348{
10349  enum machine_mode mode = GET_MODE (op0);
10350  enum rtx_code c;
10351  rtx target;
10352
10353  if (code == SMAX || code == SMIN)
10354    c = GE;
10355  else
10356    c = GEU;
10357
10358  if (code == SMAX || code == UMAX)
10359    target = emit_conditional_move (dest, c, op0, op1, mode,
10360				    op0, op1, mode, 0);
10361  else
10362    target = emit_conditional_move (dest, c, op0, op1, mode,
10363				    op1, op0, mode, 0);
10364  if (target == NULL_RTX)
10365    abort ();
10366  if (target != dest)
10367    emit_move_insn (dest, target);
10368}
10369
10370/* Emit instructions to move SRC to DST.  Called by splitters for
10371   multi-register moves.  It will emit at most one instruction for
10372   each register that is accessed; that is, it won't emit li/lis pairs
10373   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
10374   register.  */
10375
void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = HARD_REGNO_NREGS (reg, mode);
  /* Per-register chunk mode depends on the register class.  */
  if (FP_REGNO_P (reg))
    reg_mode = DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
    abort ();

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
	 overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
	emit_insn (gen_rtx_SET (VOIDmode,
				simplify_gen_subreg (reg_mode, dst, mode,
						     i * reg_mode_size),
				simplify_gen_subreg (reg_mode, src, mode,
						     i * reg_mode_size)));
    }
  else
    {
      int i;
      /* J is the register offset (within the destination group) at
	 which the copy loop below starts; -1 means start at 0.  */
      int j = -1;
      bool used_update = false;

      if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
        {
          rtx breg;

	  /* Convert a pre-modify load address into an explicit base
	     register update followed by a plain (mem breg) load.  */
	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (src, 0), 0);
	      delta_rtx =  GET_CODE (XEXP (src, 0)) == PRE_INC
		  ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
		  : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
	      emit_insn (TARGET_32BIT
			 ? gen_addsi3 (breg, breg, delta_rtx)
			 : gen_adddi3 (breg, breg, delta_rtx));
	      src = gen_rtx_MEM (mode, breg);
	    }

	  /* The source address now involves only a base register
	     (possibly plus an offset).  If that base register is one
	     of the destination registers, it must be loaded last, so
	     the loop below starts just after it (index J + 1).  */

	  breg = (GET_CODE (XEXP (src, 0)) == PLUS
		  ? XEXP (XEXP (src, 0), 0)
		  : XEXP (src, 0));

	  if (!REG_P (breg))
	      abort();

	  if (REGNO (breg) >= REGNO (dst)
	      && REGNO (breg) < REGNO (dst) + nregs)
	    j = REGNO (breg) - REGNO (dst);
        }

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
	{
	  rtx breg;

	  /* Same pre-modify conversion for a store destination.  */
	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (dst, 0), 0);
	      delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
		? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
		: GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));

	      /* We have to update the breg before doing the store.
		 Use store with update, if available.  */

	      if (TARGET_UPDATE)
		{
		  /* The store-with-update insn writes the first word
		     while updating the base register.  */
		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
		  emit_insn (TARGET_32BIT
			     ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
			     : gen_movdi_update (breg, breg, delta_rtx, nsrc));
		  used_update = true;
		}
	      else
		emit_insn (TARGET_32BIT
			   ? gen_addsi3 (breg, breg, delta_rtx)
			   : gen_adddi3 (breg, breg, delta_rtx));
	      dst = gen_rtx_MEM (mode, breg);
	    }
	}

      /* Copy NREGS chunks, starting at index J + 1 and wrapping
	 around so the address base register (if any) goes last.  */
      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  ++j;
	  if (j == nregs)
	    j = 0;

	  /* If the store-with-update above already moved the first
	     word, skip it here.  */
	  if (j == 0 && used_update)
	    continue;

	  emit_insn (gen_rtx_SET (VOIDmode,
				  simplify_gen_subreg (reg_mode, dst, mode,
						       j * reg_mode_size),
				  simplify_gen_subreg (reg_mode, src, mode,
						       j * reg_mode_size)));
	}
    }
}
10507
10508
10509/* This page contains routines that are used to determine what the
10510   function prologue and epilogue code will do and write them out.  */
10511
10512/* Return the first fixed-point register that is required to be
10513   saved. 32 if none.  */
10514
10515int
10516first_reg_to_save (void)
10517{
10518  int first_reg;
10519
10520  /* Find lowest numbered live register.  */
10521  for (first_reg = 13; first_reg <= 31; first_reg++)
10522    if (regs_ever_live[first_reg]
10523	&& (! call_used_regs[first_reg]
10524	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10525		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10526		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10527      break;
10528
10529#if TARGET_MACHO
10530  if (flag_pic
10531      && current_function_uses_pic_offset_table
10532      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10533    return RS6000_PIC_OFFSET_TABLE_REGNUM;
10534#endif
10535
10536  return first_reg;
10537}
10538
10539/* Similar, for FP regs.  */
10540
10541int
10542first_fp_reg_to_save (void)
10543{
10544  int first_reg;
10545
10546  /* Find lowest numbered live register.  */
10547  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10548    if (regs_ever_live[first_reg])
10549      break;
10550
10551  return first_reg;
10552}
10553
10554/* Similar, for AltiVec regs.  */
10555
10556static int
10557first_altivec_reg_to_save (void)
10558{
10559  int i;
10560
10561  /* Stack frame remains as is unless we are in AltiVec ABI.  */
10562  if (! TARGET_ALTIVEC_ABI)
10563    return LAST_ALTIVEC_REGNO + 1;
10564
10565  /* Find lowest numbered live register.  */
10566  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10567    if (regs_ever_live[i])
10568      break;
10569
10570  return i;
10571}
10572
10573/* Return a 32-bit mask of the AltiVec registers we need to set in
10574   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
10575   the 32-bit word is 0.  */
10576
static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned; this assumes args_info.vregno is
     always >= ALTIVEC_ARG_MIN_REG so the initial I cannot wrap --
     confirm against the argument-passing code.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
10608
10609static void
10610is_altivec_return_reg (rtx reg, void *xyes)
10611{
10612  bool *yes = (bool *) xyes;
10613  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10614    *yes = true;
10615}
10616
10617
10618/* Calculate the stack information for the current function.  This is
10619   complicated by having two separate calling sequences, the AIX calling
10620   sequence and the V.4 calling sequence.
10621
10622   AIX (and Darwin/Mac OS X) stack frames look like:
10623							  32-bit  64-bit
10624	SP---->	+---------------------------------------+
10625		| back chain to caller			| 0	  0
10626		+---------------------------------------+
10627		| saved CR				| 4       8 (8-11)
10628		+---------------------------------------+
10629		| saved LR				| 8       16
10630		+---------------------------------------+
10631		| reserved for compilers		| 12      24
10632		+---------------------------------------+
10633		| reserved for binders			| 16      32
10634		+---------------------------------------+
10635		| saved TOC pointer			| 20      40
10636		+---------------------------------------+
10637		| Parameter save area (P)		| 24      48
10638		+---------------------------------------+
10639		| Alloca space (A)			| 24+P    etc.
10640		+---------------------------------------+
10641		| Local variable space (L)		| 24+P+A
10642		+---------------------------------------+
10643		| Float/int conversion temporary (X)	| 24+P+A+L
10644		+---------------------------------------+
10645		| Save area for AltiVec registers (W)	| 24+P+A+L+X
10646		+---------------------------------------+
10647		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
10648		+---------------------------------------+
10649		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
10650		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
10654		+---------------------------------------+
10655	old SP->| back chain to caller's caller		|
10656		+---------------------------------------+
10657
10658   The required alignment for AIX configurations is two words (i.e., 8
10659   or 16 bytes).
10660
10661
10662   V.4 stack frames look like:
10663
10664	SP---->	+---------------------------------------+
10665		| back chain to caller			| 0
10666		+---------------------------------------+
10667		| caller's saved LR			| 4
10668		+---------------------------------------+
10669		| Parameter save area (P)		| 8
10670		+---------------------------------------+
10671		| Alloca space (A)			| 8+P
10672		+---------------------------------------+
10673		| Varargs save area (V)			| 8+P+A
10674		+---------------------------------------+
10675		| Local variable space (L)		| 8+P+A+V
10676		+---------------------------------------+
10677		| Float/int conversion temporary (X)	| 8+P+A+V+L
10678		+---------------------------------------+
10679		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
10680		+---------------------------------------+
10681		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
10682		+---------------------------------------+
10683		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
10684		+---------------------------------------+
10685                | SPE: area for 64-bit GP registers     |
10686                +---------------------------------------+
10687                | SPE alignment padding                 |
10688                +---------------------------------------+
10689		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
10690		+---------------------------------------+
10691		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
10692		+---------------------------------------+
10693		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
10694		+---------------------------------------+
10695	old SP->| back chain to caller's caller		|
10696		+---------------------------------------+
10697
10698   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10699   given.  (But note below and in sysv4.h that we require only 8 and
10700   may round up the size of our stack frame anyways.  The historical
10701   reason is early versions of powerpc-linux which didn't properly
10702   align the stack at program startup.  A happy side-effect is that
10703   -mno-eabi libraries can be used with -meabi programs.)
10704
10705   The EABI configuration defaults to the V.4 layout.  However,
10706   the stack alignment requirements may differ.  If -mno-eabi is not
10707   given, the required stack alignment is 8 bytes; if -mno-eabi is
10708   given, the required alignment is 16 bytes.  (But see V.4 comment
10709   above.)  */
10710
10711#ifndef ABI_STACK_BOUNDARY
10712#define ABI_STACK_BOUNDARY STACK_BOUNDARY
10713#endif
10714
10715static rs6000_stack_t *
10716rs6000_stack_info (void)
10717{
10718  static rs6000_stack_t info, zero_info;
10719  rs6000_stack_t *info_ptr = &info;
10720  int reg_size = TARGET_32BIT ? 4 : 8;
10721  int ehrd_size;
10722  HOST_WIDE_INT non_fixed_size;
10723
10724  /* Zero all fields portably.  */
10725  info = zero_info;
10726
10727  if (TARGET_SPE)
10728    {
10729      /* Cache value so we don't rescan instruction chain over and over.  */
10730      if (cfun->machine->insn_chain_scanned_p == 0)
10731	{
10732	  cfun->machine->insn_chain_scanned_p = 1;
10733	  info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10734	}
10735    }
10736
10737  /* Select which calling sequence.  */
10738  info_ptr->abi = DEFAULT_ABI;
10739
10740  /* Calculate which registers need to be saved & save area size.  */
10741  info_ptr->first_gp_reg_save = first_reg_to_save ();
10742  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10743     even if it currently looks like we won't.  */
10744  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10745       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10746       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10747      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10748    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10749  else
10750    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10751
10752  /* For the SPE, we have an additional upper 32-bits on each GPR.
10753     Ideally we should save the entire 64-bits only when the upper
10754     half is used in SIMD instructions.  Since we only record
10755     registers live (not the size they are used in), this proves
10756     difficult because we'd have to traverse the instruction chain at
10757     the right time, taking reload into account.  This is a real pain,
10758     so we opt to save the GPRs in 64-bits always if but one register
10759     gets used in 64-bits.  Otherwise, all the registers in the frame
10760     get saved in 32-bits.
10761
10762     So... since when we save all GPRs (except the SP) in 64-bits, the
10763     traditional GP save area will be empty.  */
10764  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10765    info_ptr->gp_size = 0;
10766
10767  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10768  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10769
10770  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10771  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10772				 - info_ptr->first_altivec_reg_save);
10773
10774  /* Does this function call anything?  */
10775  info_ptr->calls_p = (! current_function_is_leaf
10776		       || cfun->machine->ra_needs_full_frame);
10777
10778  /* Determine if we need to save the link register.  */
10779  if (rs6000_ra_ever_killed ()
10780      || (DEFAULT_ABI == ABI_AIX
10781	  && current_function_profile
10782	  && !TARGET_PROFILE_KERNEL)
10783#ifdef TARGET_RELOCATABLE
10784      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10785#endif
10786      || (info_ptr->first_fp_reg_save != 64
10787	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10788      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10789      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10790      || (DEFAULT_ABI == ABI_DARWIN
10791	  && flag_pic
10792	  && current_function_uses_pic_offset_table)
10793      || info_ptr->calls_p)
10794    {
10795      info_ptr->lr_save_p = 1;
10796      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10797    }
10798
10799  /* Determine if we need to save the condition code registers.  */
10800  if (regs_ever_live[CR2_REGNO]
10801      || regs_ever_live[CR3_REGNO]
10802      || regs_ever_live[CR4_REGNO])
10803    {
10804      info_ptr->cr_save_p = 1;
10805      if (DEFAULT_ABI == ABI_V4)
10806	info_ptr->cr_size = reg_size;
10807    }
10808
10809  /* If the current function calls __builtin_eh_return, then we need
10810     to allocate stack space for registers that will hold data for
10811     the exception handler.  */
10812  if (current_function_calls_eh_return)
10813    {
10814      unsigned int i;
10815      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10816	continue;
10817
10818      /* SPE saves EH registers in 64-bits.  */
10819      ehrd_size = i * (TARGET_SPE_ABI
10820		       && info_ptr->spe_64bit_regs_used != 0
10821		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10822    }
10823  else
10824    ehrd_size = 0;
10825
10826  /* Determine various sizes.  */
10827  info_ptr->reg_size     = reg_size;
10828  info_ptr->fixed_size   = RS6000_SAVE_AREA;
10829  info_ptr->varargs_size = RS6000_VARARGS_AREA;
10830  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
10831  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
10832					 TARGET_ALTIVEC ? 16 : 8);
10833
10834  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10835    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10836  else
10837    info_ptr->spe_gp_size = 0;
10838
10839  if (TARGET_ALTIVEC_ABI)
10840    info_ptr->vrsave_mask = compute_vrsave_mask ();
10841  else
10842    info_ptr->vrsave_mask = 0;
10843
10844  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10845    info_ptr->vrsave_size  = 4;
10846  else
10847    info_ptr->vrsave_size  = 0;
10848
10849  /* Calculate the offsets.  */
10850  switch (DEFAULT_ABI)
10851    {
10852    case ABI_NONE:
10853    default:
10854      abort ();
10855
10856    case ABI_AIX:
10857    case ABI_DARWIN:
10858      info_ptr->fp_save_offset   = - info_ptr->fp_size;
10859      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
10860
10861      if (TARGET_ALTIVEC_ABI)
10862	{
10863	  info_ptr->vrsave_save_offset
10864	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10865
10866	  /* Align stack so vector save area is on a quadword boundary.  */
10867	  if (info_ptr->altivec_size != 0)
10868	    info_ptr->altivec_padding_size
10869	      = 16 - (-info_ptr->vrsave_save_offset % 16);
10870	  else
10871	    info_ptr->altivec_padding_size = 0;
10872
10873	  info_ptr->altivec_save_offset
10874	    = info_ptr->vrsave_save_offset
10875	    - info_ptr->altivec_padding_size
10876	    - info_ptr->altivec_size;
10877
10878	  /* Adjust for AltiVec case.  */
10879	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10880	}
10881      else
10882	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
10883      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
10884      info_ptr->lr_save_offset   = 2*reg_size;
10885      break;
10886
10887    case ABI_V4:
10888      info_ptr->fp_save_offset   = - info_ptr->fp_size;
10889      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
10890      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
10891
10892      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10893      {
10894        /* Align stack so SPE GPR save area is aligned on a
10895           double-word boundary.  */
10896        if (info_ptr->spe_gp_size != 0)
10897          info_ptr->spe_padding_size
10898            = 8 - (-info_ptr->cr_save_offset % 8);
10899        else
10900          info_ptr->spe_padding_size = 0;
10901
10902        info_ptr->spe_gp_save_offset
10903          = info_ptr->cr_save_offset
10904          - info_ptr->spe_padding_size
10905          - info_ptr->spe_gp_size;
10906
10907        /* Adjust for SPE case.  */
10908        info_ptr->toc_save_offset
10909          = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10910      }
10911      else if (TARGET_ALTIVEC_ABI)
10912	{
10913	  info_ptr->vrsave_save_offset
10914	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10915
10916	  /* Align stack so vector save area is on a quadword boundary.  */
10917	  if (info_ptr->altivec_size != 0)
10918	    info_ptr->altivec_padding_size
10919	      = 16 - (-info_ptr->vrsave_save_offset % 16);
10920	  else
10921	    info_ptr->altivec_padding_size = 0;
10922
10923	  info_ptr->altivec_save_offset
10924	    = info_ptr->vrsave_save_offset
10925	    - info_ptr->altivec_padding_size
10926	    - info_ptr->altivec_size;
10927
10928	  /* Adjust for AltiVec case.  */
10929	  info_ptr->toc_save_offset
10930	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
10931	}
10932      else
10933	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
10934      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
10935      info_ptr->lr_save_offset   = reg_size;
10936      break;
10937    }
10938
10939  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
10940					 + info_ptr->gp_size
10941					 + info_ptr->altivec_size
10942					 + info_ptr->altivec_padding_size
10943					 + info_ptr->spe_gp_size
10944					 + info_ptr->spe_padding_size
10945					 + ehrd_size
10946					 + info_ptr->cr_size
10947					 + info_ptr->lr_size
10948					 + info_ptr->vrsave_size
10949					 + info_ptr->toc_size,
10950					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10951					 ? 16 : 8);
10952
10953  non_fixed_size	 = (info_ptr->vars_size
10954			    + info_ptr->parm_size
10955			    + info_ptr->save_size
10956			    + info_ptr->varargs_size);
10957
10958  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
10959				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10960
10961  /* Determine if we need to allocate any stack frame:
10962
10963     For AIX we need to push the stack if a frame pointer is needed
10964     (because the stack might be dynamically adjusted), if we are
10965     debugging, if we make calls, or if the sum of fp_save, gp_save,
10966     and local variables are more than the space needed to save all
10967     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10968     + 18*8 = 288 (GPR13 reserved).
10969
10970     For V.4 we don't have the stack cushion that AIX uses, but assume
10971     that the debugger can handle stackless frames.  */
10972
10973  if (info_ptr->calls_p)
10974    info_ptr->push_p = 1;
10975
10976  else if (DEFAULT_ABI == ABI_V4)
10977    info_ptr->push_p = non_fixed_size != 0;
10978
10979  else if (frame_pointer_needed)
10980    info_ptr->push_p = 1;
10981
10982  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10983    info_ptr->push_p = 1;
10984
10985  else
10986    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
10987
10988  /* Zero offsets if we're not saving those registers.  */
10989  if (info_ptr->fp_size == 0)
10990    info_ptr->fp_save_offset = 0;
10991
10992  if (info_ptr->gp_size == 0)
10993    info_ptr->gp_save_offset = 0;
10994
10995  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10996    info_ptr->altivec_save_offset = 0;
10997
10998  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10999    info_ptr->vrsave_save_offset = 0;
11000
11001  if (! TARGET_SPE_ABI
11002      || info_ptr->spe_64bit_regs_used == 0
11003      || info_ptr->spe_gp_size == 0)
11004    info_ptr->spe_gp_save_offset = 0;
11005
11006  if (! info_ptr->lr_save_p)
11007    info_ptr->lr_save_offset = 0;
11008
11009  if (! info_ptr->cr_save_p)
11010    info_ptr->cr_save_offset = 0;
11011
11012  if (! info_ptr->toc_save_p)
11013    info_ptr->toc_save_offset = 0;
11014
11015  return info_ptr;
11016}
11017
11018/* Return true if the current function uses any GPRs in 64-bit SIMD
11019   mode.  */
11020
11021static bool
11022spe_func_has_64bit_regs_p (void)
11023{
11024  rtx insns, insn;
11025
11026  /* Functions that save and restore all the call-saved registers will
11027     need to save/restore the registers in 64-bits.  */
11028  if (current_function_calls_eh_return
11029      || current_function_calls_setjmp
11030      || current_function_has_nonlocal_goto)
11031    return true;
11032
11033  insns = get_insns ();
11034
11035  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11036    {
11037      if (INSN_P (insn))
11038	{
11039	  rtx i;
11040
11041	  i = PATTERN (insn);
11042	  if (GET_CODE (i) == SET
11043	      && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11044	    return true;
11045	}
11046    }
11047
11048  return false;
11049}
11050
/* Dump the stack frame layout INFO to stderr for debugging.  When
   INFO is null, compute the layout for the current function first.
   Fields still at their zero/unused defaults are omitted.  */
static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Translate the ABI enum to a printable name.  */
  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 / LAST_ALTIVEC_REGNO+1 mean "no registers of that class
     are saved", so those cases are skipped.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
11196
11197rtx
11198rs6000_return_addr (int count, rtx frame)
11199{
11200  /* Currently we don't optimize very well between prolog and body
11201     code and for PIC code the code can be actually quite bad, so
11202     don't try to be too clever here.  */
11203  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11204    {
11205      cfun->machine->ra_needs_full_frame = 1;
11206
11207      return
11208	gen_rtx_MEM
11209	  (Pmode,
11210	   memory_address
11211	   (Pmode,
11212	    plus_constant (copy_to_reg
11213			   (gen_rtx_MEM (Pmode,
11214					 memory_address (Pmode, frame))),
11215			   RETURN_ADDRESS_OFFSET)));
11216    }
11217
11218  cfun->machine->ra_need_lr = 1;
11219  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11220}
11221
11222/* Say whether a function is a candidate for sibcall handling or not.
11223   We do not allow indirect calls to be optimized into sibling calls.
11224   Also, we can't do it if there are any vector parameters; there's
11225   nowhere to put the VRsave code so it works; note that functions with
11226   vector parameters are required to have a prototype, so the argument
11227   type info must be available here.  (The tail recursion case can work
11228   with vector parameters, but there's no way to distinguish here.) */
11229static bool
11230rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11231{
11232  tree type;
11233  if (decl)
11234    {
11235      if (TARGET_ALTIVEC_VRSAVE)
11236        {
11237	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11238	       type; type = TREE_CHAIN (type))
11239	    {
11240	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11241		return false;
11242	    }
11243        }
11244      if (DEFAULT_ABI == ABI_DARWIN
11245	  || (*targetm.binds_local_p) (decl))
11246	{
11247	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11248
11249	  if (!lookup_attribute ("longcall", attr_list)
11250	      || lookup_attribute ("shortcall", attr_list))
11251	    return true;
11252	}
11253    }
11254  return false;
11255}
11256
11257static int
11258rs6000_ra_ever_killed (void)
11259{
11260  rtx top;
11261  rtx reg;
11262  rtx insn;
11263
11264  if (current_function_is_thunk)
11265    return 0;
11266
11267  /* regs_ever_live has LR marked as used if any sibcalls are present,
11268     but this should not force saving and restoring in the
11269     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
11270     clobbers LR, so that is inappropriate.  */
11271
11272  /* Also, the prologue can generate a store into LR that
11273     doesn't really count, like this:
11274
11275        move LR->R0
11276        bcl to set PIC register
11277        move LR->R31
11278        move R0->LR
11279
11280     When we're called from the epilogue, we need to avoid counting
11281     this as a store.  */
11282
11283  push_topmost_sequence ();
11284  top = get_insns ();
11285  pop_topmost_sequence ();
11286  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11287
11288  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11289    {
11290      if (INSN_P (insn))
11291	{
11292	  if (FIND_REG_INC_NOTE (insn, reg))
11293	    return 1;
11294	  else if (GET_CODE (insn) == CALL_INSN
11295		   && !SIBLING_CALL_P (insn))
11296	    return 1;
11297	  else if (set_of (reg, insn) != NULL_RTX
11298		   && !prologue_epilogue_contains (insn))
11299	    return 1;
11300    	}
11301    }
11302  return 0;
11303}
11304
11305/* Add a REG_MAYBE_DEAD note to the insn.  */
11306static void
11307rs6000_maybe_dead (rtx insn)
11308{
11309  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11310					const0_rtx,
11311					REG_NOTES (insn));
11312}
11313
11314/* Emit instructions needed to load the TOC register.
11315   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11316   a constant pool; or for SVR4 -fpic.  */
11317
/* Emit instructions that load the TOC/GOT pointer into
   RS6000_PIC_OFFSET_TABLE_REGNUM for the current ABI/PIC combination.
   FROMPROLOG is nonzero when called from the prologue, in which case
   hard registers are used as temporaries and the emitted insns are
   marked REG_MAYBE_DEAD so they can be deleted if unused.  */
void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC: a single load_toc_v4_pic_si insn.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 large-model PIC (-fPIC): materialize the GOT address via a
     bcl/label pair and an add.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF<n>: the point whose address bcl captures; LCL<n>: the
	     label whose distance from the GOT is folded in.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* dest = temp0 + dest.  (SImode add: this path is presumably
	 32-bit only, since 64-bit ELF uses ABI_AIX — TODO confirm.)  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Load the TOC anchor with a lis/ori-style high/low pair.  */
      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from the reserved stack slot.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
11416
11417/* Emit instructions to restore the link register after determining where
11418   its value has been stored.  */
11419
11420void
11421rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11422{
11423  rs6000_stack_t *info = rs6000_stack_info ();
11424  rtx operands[2];
11425
11426  operands[0] = source;
11427  operands[1] = scratch;
11428
11429  if (info->lr_save_p)
11430    {
11431      rtx frame_rtx = stack_pointer_rtx;
11432      HOST_WIDE_INT sp_offset = 0;
11433      rtx tmp;
11434
11435      if (frame_pointer_needed
11436	  || current_function_calls_alloca
11437	  || info->total_size > 32767)
11438	{
11439	  emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11440	  frame_rtx = operands[1];
11441	}
11442      else if (info->push_p)
11443	sp_offset = info->total_size;
11444
11445      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11446      tmp = gen_rtx_MEM (Pmode, tmp);
11447      emit_move_insn (tmp, operands[0]);
11448    }
11449  else
11450    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11451}
11452
/* Alias set for TOC references; allocated lazily, -1 means "not yet
   created".  GTY-marked so it survives garbage collection.  */
static GTY(()) int set = -1;

/* Return the alias set used for TOC references, creating it on the
   first call.  */
int
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}
11462
11463/* This returns nonzero if the current function uses the TOC.  This is
11464   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11465   is generated by the ABI_V4 load_toc_* patterns.  */
11466#if TARGET_ELF
11467static int
11468uses_TOC (void)
11469{
11470  rtx insn;
11471
11472  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11473    if (INSN_P (insn))
11474      {
11475	rtx pat = PATTERN (insn);
11476	int i;
11477
11478	if (GET_CODE (pat) == PARALLEL)
11479	  for (i = 0; i < XVECLEN (pat, 0); i++)
11480	    {
11481	      rtx sub = XVECEXP (pat, 0, i);
11482	      if (GET_CODE (sub) == USE)
11483		{
11484		  sub = XEXP (sub, 0);
11485		  if (GET_CODE (sub) == UNSPEC
11486		      && XINT (sub, 1) == UNSPEC_TOC)
11487		    return 1;
11488		}
11489	    }
11490      }
11491  return 0;
11492}
11493#endif
11494
11495rtx
11496create_TOC_reference (rtx symbol)
11497{
11498  return gen_rtx_PLUS (Pmode,
11499	   gen_rtx_REG (Pmode, TOC_REGISTER),
11500	     gen_rtx_CONST (Pmode,
11501	       gen_rtx_MINUS (Pmode, symbol,
11502		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11503}
11504
11505/* If _Unwind_* has been called from within the same module,
11506   toc register is not guaranteed to be saved to 40(1) on function
11507   entry.  Save it there in that case.  */
11508
void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  /* Load the caller's frame via the back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Fetch the instruction at the caller's saved-LR return point
     (2 words above the back chain).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  /* NOTE(review): these constants appear to be the encodings of the
     insn that reloads r2 from its stack slot after a call
     (32-bit: lwz r2,20(r1); 64-bit: ld r2,40(r1)) — confirm against
     the AIX ABI.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* If the caller already reloads the TOC there, nothing to do.  */
  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  /* Otherwise save r2 (the TOC pointer) into its reserved slot,
     5 words above the back chain (20(1) / 40(1)).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
11540
11541/* This ties together stack memory (MEM with an alias set of
11542   rs6000_sr_alias_set) and the change to the stack pointer.  */
11543
11544static void
11545rs6000_emit_stack_tie (void)
11546{
11547  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11548
11549  set_mem_alias_set (mem, rs6000_sr_alias_set);
11550  emit_insn (gen_stack_tie (mem));
11551}
11552
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* When -fstack-limit is in effect, emit a trap if the new stack
     pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      /* Limit held in a fixed register (not r0/r1): compute
	 limit + size into r0 and trap if sp would go below it.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Limit given as a symbol (32-bit SVR4 only): materialize
	 symbol + size with lis/addi and trap the same way.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 if the caller asked for it, or
     if we will need it below to store the back chain by hand.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* stwu/stdu take a 16-bit signed displacement; larger sizes
	 must go through a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update: decrements sp and stores the old sp (back
	 chain) at the new top of stack in one instruction.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: adjust sp, then store the saved old sp
	 (copied to r12 above) as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Attach a frame-related note describing the net effect
     (sp = sp - size) so the unwinder sees a simple adjustment.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
11638
11639/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11640   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11641   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
11642   deduce these equivalences by itself so it wasn't necessary to hold
11643   its hand so much.  */
11644
11645static void
11646rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11647		      rtx reg2, rtx rreg)
11648{
11649  rtx real, temp;
11650
11651  /* copy_rtx will not make unique copies of registers, so we need to
11652     ensure we don't have unwanted sharing here.  */
11653  if (reg == reg2)
11654    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11655
11656  if (reg == rreg)
11657    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11658
11659  real = copy_rtx (PATTERN (insn));
11660
11661  if (reg2 != NULL_RTX)
11662    real = replace_rtx (real, reg2, rreg);
11663
11664  real = replace_rtx (real, reg,
11665		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11666							STACK_POINTER_REGNUM),
11667				    GEN_INT (val)));
11668
11669  /* We expect that 'real' is either a SET or a PARALLEL containing
11670     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
11671     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
11672
11673  if (GET_CODE (real) == SET)
11674    {
11675      rtx set = real;
11676
11677      temp = simplify_rtx (SET_SRC (set));
11678      if (temp)
11679	SET_SRC (set) = temp;
11680      temp = simplify_rtx (SET_DEST (set));
11681      if (temp)
11682	SET_DEST (set) = temp;
11683      if (GET_CODE (SET_DEST (set)) == MEM)
11684	{
11685	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11686	  if (temp)
11687	    XEXP (SET_DEST (set), 0) = temp;
11688	}
11689    }
11690  else if (GET_CODE (real) == PARALLEL)
11691    {
11692      int i;
11693      for (i = 0; i < XVECLEN (real, 0); i++)
11694	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11695	  {
11696	    rtx set = XVECEXP (real, 0, i);
11697
11698	    temp = simplify_rtx (SET_SRC (set));
11699	    if (temp)
11700	      SET_SRC (set) = temp;
11701	    temp = simplify_rtx (SET_DEST (set));
11702	    if (temp)
11703	      SET_DEST (set) = temp;
11704	    if (GET_CODE (SET_DEST (set)) == MEM)
11705	      {
11706		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11707		if (temp)
11708		  XEXP (SET_DEST (set), 0) = temp;
11709	      }
11710	    RTX_FRAME_RELATED_P (set) = 1;
11711	  }
11712    }
11713  else
11714    abort ();
11715
11716  if (TARGET_SPE)
11717    real = spe_synthesize_frame_save (real);
11718
11719  RTX_FRAME_RELATED_P (insn) = 1;
11720  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11721					real,
11722					REG_NOTES (insn));
11723}
11724
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  REAL is returned
   unchanged when it is not a V2SImode (64-bit SPE) register save.  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit SPE saves need the synthetic companion.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* REAL2 becomes an SImode copy of the note; narrowing the source
     register forces replace_rtx to see a distinct (non-shared) REG.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian, the low word of the register lives at offset +4,
     so the "real" 32-bit save slot moves up by 4.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  /* The synthetic save targets fake register n+1200 to flag a 64-bit
     save to the unwind machinery.  */
  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  /* Order the pair so the synthetic save covers the high half.  */
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
11787
11788/* Returns an insn that has a vrsave set operation with the
11789   appropriate CLOBBERs.  */
11790
11791static rtx
11792generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11793{
11794  int nclobs, i;
11795  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11796  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11797
11798  clobs[0]
11799    = gen_rtx_SET (VOIDmode,
11800		   vrsave,
11801		   gen_rtx_UNSPEC_VOLATILE (SImode,
11802					    gen_rtvec (2, reg, vrsave),
11803					    30));
11804
11805  nclobs = 1;
11806
11807  /* We need to clobber the registers in the mask so the scheduler
11808     does not move sets to VRSAVE before sets of AltiVec registers.
11809
11810     However, if the function receives nonlocal gotos, reload will set
11811     all call saved registers live.  We will end up with:
11812
11813     	(set (reg 999) (mem))
11814	(parallel [ (set (reg vrsave) (unspec blah))
11815		    (clobber (reg 999))])
11816
11817     The clobber will cause the store into reg 999 to be dead, and
11818     flow will attempt to delete an epilogue insn.  In this case, we
11819     need an unspec use/set of the register.  */
11820
11821  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11822    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11823      {
11824	if (!epiloguep || call_used_regs [i])
11825	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11826					     gen_rtx_REG (V4SImode, i));
11827	else
11828	  {
11829	    rtx reg = gen_rtx_REG (V4SImode, i);
11830
11831	    clobs[nclobs++]
11832	      = gen_rtx_SET (VOIDmode,
11833			     reg,
11834			     gen_rtx_UNSPEC (V4SImode,
11835					     gen_rtvec (1, reg), 27));
11836	  }
11837      }
11838
11839  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11840
11841  for (i = 0; i < nclobs; ++i)
11842    XVECEXP (insn, 0, i) = clobs[i];
11843
11844  return insn;
11845}
11846
11847/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11848   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */
11849
11850static void
11851emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11852		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11853{
11854  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11855  rtx replacea, replaceb;
11856
11857  int_rtx = GEN_INT (offset);
11858
11859  /* Some cases that need register indexed addressing.  */
11860  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11861      || (TARGET_SPE_ABI
11862	  && SPE_VECTOR_MODE (mode)
11863	  && !SPE_CONST_OFFSET_OK (offset)))
11864    {
11865      /* Whomever calls us must make sure r11 is available in the
11866         flow path of instructions in the prologue.  */
11867      offset_rtx = gen_rtx_REG (Pmode, 11);
11868      emit_move_insn (offset_rtx, int_rtx);
11869
11870      replacea = offset_rtx;
11871      replaceb = int_rtx;
11872    }
11873  else
11874    {
11875      offset_rtx = int_rtx;
11876      replacea = NULL_RTX;
11877      replaceb = NULL_RTX;
11878    }
11879
11880  reg = gen_rtx_REG (mode, regno);
11881  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11882  mem = gen_rtx_MEM (mode, addr);
11883  set_mem_alias_set (mem, rs6000_sr_alias_set);
11884
11885  insn = emit_move_insn (mem, reg);
11886
11887  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11888}
11889
11890/* Emit an offset memory reference suitable for a frame store, while
11891   converting to a valid addressing mode.  */
11892
11893static rtx
11894gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11895{
11896  rtx int_rtx, offset_rtx;
11897
11898  int_rtx = GEN_INT (offset);
11899
11900  if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11901    {
11902      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11903      emit_move_insn (offset_rtx, int_rtx);
11904    }
11905  else
11906    offset_rtx = int_rtx;
11907
11908  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11909}
11910
/* Emit function prologue as insns.  Allocates the stack frame and
   saves FPRs, GPRs, AltiVec registers, VRSAVE, LR and CR as required
   by the current ABI, attaching unwind notes via
   rs6000_frame_related.  */

void
rs6000_emit_prologue (void)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL_RTX;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

   /* SPE functions that used 64-bit registers save them as V2SImode
      pairs, 8 bytes each.  */
   if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
     {
       reg_mode = V2SImode;
       reg_size = 8;
     }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save)
			|| current_function_calls_eh_return
			|| cfun->machine->ra_need_lr);

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p
      && (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return))
    {
      /* Small frames can be addressed off the new sp directly;
	 otherwise keep the old sp in r12 and save relative to it.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line FPR save: emit a PARALLEL representing the call
	 to the _savefN millicode routine, which clobbers LR.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Save GPRs one at a time.  The PIC register is also saved when
	 PIC code may clobber it.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		/* SPE 64-bit saves: large offsets must go through the
		   fixed scratch register.  */
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
	{
	  rtx addr, reg, mem;

	  reg = gen_rtx_REG (reg_mode, 2);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (sp_offset + 5 * reg_size));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	  /* Replace the store with a blockage: only the unwind note
	     matters, not the store itself.  */
	  PATTERN (insn) = gen_blockage ();
	}

      /* Save the EH data registers so a thrown exception can restore
	 them.  */
      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
	 But that's OK.  All we have to do is specify that _one_ condition
	 code register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R12 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF
				    && DEFAULT_ABI != ABI_AIX
				    && flag_pic
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      {
	rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
	rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
	rs6000_emit_load_toc_table (TRUE);
	rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
      }
    else
      rs6000_emit_load_toc_table (TRUE);
  }

#if TARGET_MACHO
  /* Darwin PIC: load the picbase into LR and copy it to the PIC
     offset table register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
12303
/* Write function prologue to FILE.  SIZE is the frame size computed
   by the middle end (unused; we use rs6000_stack_info instead).
   Emits .extern directives for out-of-line save/restore helpers and,
   when the prologue is not emitted as RTL, prints it directly.  */

static void
rs6000_output_function_prologue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  Emitted
     at most once per file (common_mode_defined latches).  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  /* Fallback when the target has no RTL prologue pattern: generate
     the prologue insns here and print them with final().  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
12365
12366/* Emit function epilogue as insns.
12367
12368   At present, dwarf2out_frame_debug_expr doesn't understand
12369   register restores, so we don't bother setting RTX_FRAME_RELATED_P
12370   anywhere in the epilogue.  Most of the insns below would in any case
12371   need special notes to explain where r11 is in relation to the stack.  */
12372
12373void
12374rs6000_emit_epilogue (int sibcall)
12375{
12376  rs6000_stack_t *info;
12377  int restoring_FPRs_inline;
12378  int using_load_multiple;
12379  int using_mfcr_multiple;
12380  int use_backchain_to_restore_sp;
12381  int sp_offset = 0;
12382  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12383  rtx frame_reg_rtx = sp_reg_rtx;
12384  enum machine_mode reg_mode = Pmode;
12385  int reg_size = TARGET_32BIT ? 4 : 8;
12386  int i;
12387
12388  info = rs6000_stack_info ();
12389
12390  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12391    {
12392      reg_mode = V2SImode;
12393      reg_size = 8;
12394    }
12395
12396  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12397			 && (!TARGET_SPE_ABI
12398			     || info->spe_64bit_regs_used == 0)
12399			 && info->first_gp_reg_save < 31);
12400  restoring_FPRs_inline = (sibcall
12401			   || current_function_calls_eh_return
12402			   || info->first_fp_reg_save == 64
12403			   || FP_SAVE_INLINE (info->first_fp_reg_save));
12404  use_backchain_to_restore_sp = (frame_pointer_needed
12405				 || current_function_calls_alloca
12406				 || info->total_size > 32767);
12407  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12408			 || rs6000_cpu == PROCESSOR_PPC603
12409			 || rs6000_cpu == PROCESSOR_PPC750
12410			 || optimize_size);
12411
12412  /* If we have a frame pointer, a call to alloca,  or a large stack
12413     frame, restore the old stack pointer using the backchain.  Otherwise,
12414     we know what size to update it with.  */
12415  if (use_backchain_to_restore_sp)
12416    {
12417      /* Under V.4, don't reset the stack pointer until after we're done
12418	 loading the saved registers.  */
12419      if (DEFAULT_ABI == ABI_V4)
12420	frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12421
12422      emit_move_insn (frame_reg_rtx,
12423		      gen_rtx_MEM (Pmode, sp_reg_rtx));
12424
12425    }
12426  else if (info->push_p)
12427    {
12428      if (DEFAULT_ABI == ABI_V4
12429	  || current_function_calls_eh_return)
12430	sp_offset = info->total_size;
12431      else
12432	{
12433	  emit_insn (TARGET_32BIT
12434		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12435				   GEN_INT (info->total_size))
12436		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12437				   GEN_INT (info->total_size)));
12438	}
12439    }
12440
12441  /* Restore AltiVec registers if needed.  */
12442  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12443    {
12444      int i;
12445
12446      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12447	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12448	  {
12449	    rtx addr, areg, mem;
12450
12451	    areg = gen_rtx_REG (Pmode, 0);
12452	    emit_move_insn
12453	      (areg, GEN_INT (info->altivec_save_offset
12454			      + sp_offset
12455			      + 16 * (i - info->first_altivec_reg_save)));
12456
12457	    /* AltiVec addressing mode is [reg+reg].  */
12458	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12459	    mem = gen_rtx_MEM (V4SImode, addr);
12460	    set_mem_alias_set (mem, rs6000_sr_alias_set);
12461
12462	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12463	  }
12464    }
12465
12466  /* Restore VRSAVE if needed.  */
12467  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12468      && info->vrsave_mask != 0)
12469    {
12470      rtx addr, mem, reg;
12471
12472      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12473			   GEN_INT (info->vrsave_save_offset + sp_offset));
12474      mem = gen_rtx_MEM (SImode, addr);
12475      set_mem_alias_set (mem, rs6000_sr_alias_set);
12476      reg = gen_rtx_REG (SImode, 12);
12477      emit_move_insn (reg, mem);
12478
12479      emit_insn (generate_set_vrsave (reg, info, 1));
12480    }
12481
12482  /* Get the old lr if we saved it.  */
12483  if (info->lr_save_p)
12484    {
12485      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12486				      info->lr_save_offset + sp_offset);
12487
12488      set_mem_alias_set (mem, rs6000_sr_alias_set);
12489
12490      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12491    }
12492
12493  /* Get the old cr if we saved it.  */
12494  if (info->cr_save_p)
12495    {
12496      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12497			       GEN_INT (info->cr_save_offset + sp_offset));
12498      rtx mem = gen_rtx_MEM (SImode, addr);
12499
12500      set_mem_alias_set (mem, rs6000_sr_alias_set);
12501
12502      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12503    }
12504
12505  /* Set LR here to try to overlap restores below.  */
12506  if (info->lr_save_p)
12507    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12508		    gen_rtx_REG (Pmode, 0));
12509
12510  /* Load exception handler data registers, if needed.  */
12511  if (current_function_calls_eh_return)
12512    {
12513      unsigned int i, regno;
12514
12515      if (TARGET_AIX)
12516	{
12517	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12518				   GEN_INT (sp_offset + 5 * reg_size));
12519	  rtx mem = gen_rtx_MEM (reg_mode, addr);
12520
12521	  set_mem_alias_set (mem, rs6000_sr_alias_set);
12522
12523	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12524	}
12525
12526      for (i = 0; ; ++i)
12527	{
12528	  rtx mem;
12529
12530	  regno = EH_RETURN_DATA_REGNO (i);
12531	  if (regno == INVALID_REGNUM)
12532	    break;
12533
12534	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12535				      info->ehrd_offset + sp_offset
12536				      + reg_size * (int) i);
12537	  set_mem_alias_set (mem, rs6000_sr_alias_set);
12538
12539	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12540	}
12541    }
12542
12543  /* Restore GPRs.  This is done as a PARALLEL if we are using
12544     the load-multiple instructions.  */
12545  if (using_load_multiple)
12546    {
12547      rtvec p;
12548      p = rtvec_alloc (32 - info->first_gp_reg_save);
12549      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12550	{
12551	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12552				   GEN_INT (info->gp_save_offset
12553					    + sp_offset
12554					    + reg_size * i));
12555	  rtx mem = gen_rtx_MEM (reg_mode, addr);
12556
12557	  set_mem_alias_set (mem, rs6000_sr_alias_set);
12558
12559	  RTVEC_ELT (p, i) =
12560	    gen_rtx_SET (VOIDmode,
12561			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12562			 mem);
12563	}
12564      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12565    }
12566  else
12567    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12568      if ((regs_ever_live[info->first_gp_reg_save+i]
12569	   && ! call_used_regs[info->first_gp_reg_save+i])
12570	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12571	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12572		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12573	{
12574	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12575				   GEN_INT (info->gp_save_offset
12576					    + sp_offset
12577					    + reg_size * i));
12578	  rtx mem = gen_rtx_MEM (reg_mode, addr);
12579
12580	  /* Restore 64-bit quantities for SPE.  */
12581	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12582	    {
12583	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12584	      rtx b;
12585
12586	      if (!SPE_CONST_OFFSET_OK (offset))
12587		{
12588		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12589		  emit_move_insn (b, GEN_INT (offset));
12590		}
12591	      else
12592		b = GEN_INT (offset);
12593
12594	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12595	      mem = gen_rtx_MEM (V2SImode, addr);
12596	    }
12597
12598	  set_mem_alias_set (mem, rs6000_sr_alias_set);
12599
12600	  emit_move_insn (gen_rtx_REG (reg_mode,
12601				       info->first_gp_reg_save + i), mem);
12602	}
12603
12604  /* Restore fpr's if we need to do it without calling a function.  */
12605  if (restoring_FPRs_inline)
12606    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12607      if ((regs_ever_live[info->first_fp_reg_save+i]
12608	   && ! call_used_regs[info->first_fp_reg_save+i]))
12609	{
12610	  rtx addr, mem;
12611	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12612			       GEN_INT (info->fp_save_offset
12613					+ sp_offset
12614					+ 8 * i));
12615	  mem = gen_rtx_MEM (DFmode, addr);
12616	  set_mem_alias_set (mem, rs6000_sr_alias_set);
12617
12618	  emit_move_insn (gen_rtx_REG (DFmode,
12619				       info->first_fp_reg_save + i),
12620			  mem);
12621	}
12622
12623  /* If we saved cr, restore it here.  Just those that were used.  */
12624  if (info->cr_save_p)
12625    {
12626      rtx r12_rtx = gen_rtx_REG (SImode, 12);
12627      int count = 0;
12628
12629      if (using_mfcr_multiple)
12630	{
12631	  for (i = 0; i < 8; i++)
12632	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12633	      count++;
12634	  if (count == 0)
12635	    abort ();
12636	}
12637
12638      if (using_mfcr_multiple && count > 1)
12639	{
12640	  rtvec p;
12641	  int ndx;
12642
12643	  p = rtvec_alloc (count);
12644
12645	  ndx = 0;
12646	  for (i = 0; i < 8; i++)
12647	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12648	      {
12649		rtvec r = rtvec_alloc (2);
12650		RTVEC_ELT (r, 0) = r12_rtx;
12651		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12652		RTVEC_ELT (p, ndx) =
12653		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12654			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12655		ndx++;
12656	      }
12657	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12658	  if (ndx != count)
12659	    abort ();
12660	}
12661      else
12662	for (i = 0; i < 8; i++)
12663	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12664	    {
12665	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12666							   CR0_REGNO+i),
12667					      r12_rtx));
12668	    }
12669    }
12670
12671  /* If this is V.4, unwind the stack pointer after all of the loads
12672     have been done.  We need to emit a block here so that sched
12673     doesn't decide to move the sp change before the register restores
12674     (which may not have any obvious dependency on the stack).  This
12675     doesn't hurt performance, because there is no scheduling that can
12676     be done after this point.  */
12677  if (DEFAULT_ABI == ABI_V4
12678      || current_function_calls_eh_return)
12679    {
12680      if (frame_reg_rtx != sp_reg_rtx)
12681	  rs6000_emit_stack_tie ();
12682
12683      if (use_backchain_to_restore_sp)
12684	{
12685	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12686	}
12687      else if (sp_offset != 0)
12688	{
12689	  emit_insn (TARGET_32BIT
12690		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12691				   GEN_INT (sp_offset))
12692		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12693				   GEN_INT (sp_offset)));
12694	}
12695    }
12696
12697  if (current_function_calls_eh_return)
12698    {
12699      rtx sa = EH_RETURN_STACKADJ_RTX;
12700      emit_insn (TARGET_32BIT
12701		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12702		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12703    }
12704
12705  if (!sibcall)
12706    {
12707      rtvec p;
12708      if (! restoring_FPRs_inline)
12709	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12710      else
12711	p = rtvec_alloc (2);
12712
12713      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12714      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12715				      gen_rtx_REG (Pmode,
12716						   LINK_REGISTER_REGNUM));
12717
12718      /* If we have to restore more than two FP registers, branch to the
12719	 restore function.  It will return to our caller.  */
12720      if (! restoring_FPRs_inline)
12721	{
12722	  int i;
12723	  char rname[30];
12724	  const char *alloc_rname;
12725
12726	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12727		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12728	  alloc_rname = ggc_strdup (rname);
12729	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12730					  gen_rtx_SYMBOL_REF (Pmode,
12731							      alloc_rname));
12732
12733	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12734	    {
12735	      rtx addr, mem;
12736	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12737				   GEN_INT (info->fp_save_offset + 8*i));
12738	      mem = gen_rtx_MEM (DFmode, addr);
12739	      set_mem_alias_set (mem, rs6000_sr_alias_set);
12740
12741	      RTVEC_ELT (p, i+3) =
12742		gen_rtx_SET (VOIDmode,
12743			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12744			     mem);
12745	    }
12746	}
12747
12748      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12749    }
12750}
12751
12752/* Write function epilogue.  */
12753
/* Implement TARGET_ASM_FUNCTION_EPILOGUE.  Write the function epilogue
   to FILE when the target has no epilogue RTL pattern, and, for the
   AIX ABI, emit the traceback table that follows the function body.
   SIZE is unused.  */
static void
rs6000_output_function_epilogue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		/* Every PowerPC instruction is 4 bytes long.  */
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

#if TARGET_MACHO
  macho_branch_islands ();
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
	   && NOTE_P (insn)
	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
	&& (LABEL_P (insn)
	    || (NOTE_P (insn)
		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether the optional (variable-length) fields of the
	 traceback table are emitted; -Os or ELF targets omit them
	 unless -mtraceback=full was given.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there does not seem to be any
	 official way to discover the language being compiled, so we
	 use language_string.
	 C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
	 Java is 13.  Objective-C is 14.  */
      if (! strcmp (language_string, "GNU C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU Objective-C"))
	i = 14;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only parameters passed in registers contribute to the
		 parm_info bit vector; stack parameters are skipped.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
13040
13041/* A C compound statement that outputs the assembler code for a thunk
13042   function, used to implement C++ virtual function calls with
13043   multiple inheritance.  The thunk acts as a wrapper around a virtual
13044   function, adjusting the implicit object parameter before handing
13045   control off to the real function.
13046
13047   First, emit code to add the integer DELTA to the location that
13048   contains the incoming first argument.  Assume that this argument
13049   contains a pointer, and is the one used to pass the `this' pointer
13050   in C++.  This is the incoming argument *before* the function
13051   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
13052   values of all other incoming arguments.
13053
13054   After the addition, emit code to jump to FUNCTION, which is a
13055   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
13056   not touch the return address.  Hence returning from FUNCTION will
13057   return to whoever called the current `thunk'.
13058
13059   The effect must be as if FUNCTION had been called directly with the
13060   adjusted first argument.  This macro is responsible for emitting
13061   all of the code for a thunk function; output_function_prologue()
13062   and output_function_epilogue() are not invoked.
13063
13064   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
13065   been extracted from it.)  It might possibly be useful on some
13066   targets, but probably not.
13067
13068   If you do not define this macro, the target-independent code in the
13069   C++ frontend will generate a less efficient heavyweight thunk that
13070   calls FUNCTION instead of jumping to it.  The generic approach does
13071   not support varargs.  */
13072
/* Implement TARGET_ASM_OUTPUT_MI_THUNK (see the commentary above).
   Emit the complete assembly for a thunk to FILE: adjust the incoming
   `this' pointer by DELTA, optionally by a value loaded from the
   vtable at VCALL_OFFSET, then tail-call FUNCTION.  THUNK_FNDECL is
   unused.  */
static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			tree function)
{
  rtx this, insn, funexp;

  /* Pretend compilation has finished so we may emit RTL referencing
     hard registers directly; reset at the bottom of this function.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* Load the vtable pointer from *this into r12.  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      /* An offset outside the signed 16-bit displacement range must be
	 added explicitly before the load.  */
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx))
	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
	}
      else
	{
	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
	}
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore normal compilation state.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
13172
13173/* A quick summary of the various types of 'constant-pool tables'
13174   under PowerPC:
13175
13176   Target	Flags		Name		One table per
13177   AIX		(none)		AIX TOC		object file
13178   AIX		-mfull-toc	AIX TOC		object file
13179   AIX		-mminimal-toc	AIX minimal TOC	translation unit
13180   SVR4/EABI	(none)		SVR4 SDATA	object file
13181   SVR4/EABI	-fpic		SVR4 pic	object file
13182   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
13183   SVR4/EABI	-mrelocatable	EABI TOC	function
13184   SVR4/EABI	-maix		AIX TOC		object file
13185   SVR4/EABI	-maix -mminimal-toc
13186				AIX minimal TOC	translation unit
13187
13188   Name			Reg.	Set by	entries	      contains:
13189					made by	 addrs?	fp?	sum?
13190
13191   AIX TOC		2	crt0	as	 Y	option	option
13192   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
13193   SVR4 SDATA		13	crt0	gcc	 N	Y	N
13194   SVR4 pic		30	prolog	ld	 Y	not yet	N
13195   SVR4 PIC		30	prolog	gcc	 Y	option	option
13196   EABI TOC		30	prolog	gcc	 Y	option	option
13197
13198*/
13199
13200/* Hash functions for the hash table.  */
13201
13202static unsigned
13203rs6000_hash_constant (rtx k)
13204{
13205  enum rtx_code code = GET_CODE (k);
13206  enum machine_mode mode = GET_MODE (k);
13207  unsigned result = (code << 3) ^ mode;
13208  const char *format;
13209  int flen, fidx;
13210
13211  format = GET_RTX_FORMAT (code);
13212  flen = strlen (format);
13213  fidx = 0;
13214
13215  switch (code)
13216    {
13217    case LABEL_REF:
13218      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13219
13220    case CONST_DOUBLE:
13221      if (mode != VOIDmode)
13222	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13223      flen = 2;
13224      break;
13225
13226    case CODE_LABEL:
13227      fidx = 3;
13228      break;
13229
13230    default:
13231      break;
13232    }
13233
13234  for (; fidx < flen; fidx++)
13235    switch (format[fidx])
13236      {
13237      case 's':
13238	{
13239	  unsigned i, len;
13240	  const char *str = XSTR (k, fidx);
13241	  len = strlen (str);
13242	  result = result * 613 + len;
13243	  for (i = 0; i < len; i++)
13244	    result = result * 613 + (unsigned) str[i];
13245	  break;
13246	}
13247      case 'u':
13248      case 'e':
13249	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13250	break;
13251      case 'i':
13252      case 'n':
13253	result = result * 613 + (unsigned) XINT (k, fidx);
13254	break;
13255      case 'w':
13256	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13257	  result = result * 613 + (unsigned) XWINT (k, fidx);
13258	else
13259	  {
13260	    size_t i;
13261	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13262	      result = result * 613 + (unsigned) (XWINT (k, fidx)
13263						  >> CHAR_BIT * i);
13264	  }
13265	break;
13266      case '0':
13267	break;
13268      default:
13269	abort ();
13270      }
13271
13272  return result;
13273}
13274
13275static unsigned
13276toc_hash_function (const void *hash_entry)
13277{
13278  const struct toc_hash_struct *thc =
13279    (const struct toc_hash_struct *) hash_entry;
13280  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13281}
13282
13283/* Compare H1 and H2 for equivalence.  */
13284
13285static int
13286toc_hash_eq (const void *h1, const void *h2)
13287{
13288  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13289  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13290
13291  if (((const struct toc_hash_struct *) h1)->key_mode
13292      != ((const struct toc_hash_struct *) h2)->key_mode)
13293    return 0;
13294
13295  return rtx_equal_p (r1, r2);
13296}
13297
13298/* These are the names given by the C++ front-end to vtables, and
13299   vtable-like objects.  Ideally, this logic should not be here;
13300   instead, there should be some programmatic way of inquiring as
13301   to whether or not an object is a vtable.  */
13302
/* True iff NAME is the name of a vtable or vtable-like object, judged
   by its mangled-name prefix (`_vt.' for old g++, `_ZTV'/`_ZTT'/
   `_ZTI'/`_ZTC' for the V3 ABI).  Note: the macro now uses its
   parameter (NAME); it previously captured a variable literally named
   `name' from the invoking scope, which only worked by accident.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
13309
13310void
13311rs6000_output_symbol_ref (FILE *file, rtx x)
13312{
13313  /* Currently C++ toc references to vtables can be emitted before it
13314     is decided whether the vtable is public or private.  If this is
13315     the case, then the linker will eventually complain that there is
13316     a reference to an unknown section.  Thus, for vtables only,
13317     we emit the TOC reference to reference the symbol and not the
13318     section.  */
13319  const char *name = XSTR (x, 0);
13320
13321  if (VTABLE_NAME_P (name))
13322    {
13323      RS6000_OUTPUT_BASENAME (file, name);
13324    }
13325  else
13326    assemble_name (file, name);
13327}
13328
13329/* Output a TOC entry.  We derive the entry name from what is being
13330   written.  */
13331
13332void
13333output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13334{
13335  char buf[256];
13336  const char *name = buf;
13337  const char *real_name;
13338  rtx base = x;
13339  int offset = 0;
13340
13341  if (TARGET_NO_TOC)
13342    abort ();
13343
13344  /* When the linker won't eliminate them, don't output duplicate
13345     TOC entries (this happens on AIX if there is any kind of TOC,
13346     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
13347     CODE_LABELs.  */
13348  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13349    {
13350      struct toc_hash_struct *h;
13351      void * * found;
13352
13353      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
13354         time because GGC is not initialized at that point.  */
13355      if (toc_hash_table == NULL)
13356	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13357					  toc_hash_eq, NULL);
13358
13359      h = ggc_alloc (sizeof (*h));
13360      h->key = x;
13361      h->key_mode = mode;
13362      h->labelno = labelno;
13363
13364      found = htab_find_slot (toc_hash_table, h, 1);
13365      if (*found == NULL)
13366	*found = h;
13367      else  /* This is indeed a duplicate.
13368	       Set this label equal to that label.  */
13369	{
13370	  fputs ("\t.set ", file);
13371	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13372	  fprintf (file, "%d,", labelno);
13373	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13374	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13375					      found)->labelno));
13376	  return;
13377	}
13378    }
13379
13380  /* If we're going to put a double constant in the TOC, make sure it's
13381     aligned properly when strict alignment is on.  */
13382  if (GET_CODE (x) == CONST_DOUBLE
13383      && STRICT_ALIGNMENT
13384      && GET_MODE_BITSIZE (mode) >= 64
13385      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13386    ASM_OUTPUT_ALIGN (file, 3);
13387  }
13388
13389  (*targetm.asm_out.internal_label) (file, "LC", labelno);
13390
13391  /* Handle FP constants specially.  Note that if we have a minimal
13392     TOC, things we put here aren't actually in the TOC, so we can allow
13393     FP constants.  */
13394  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13395    {
13396      REAL_VALUE_TYPE rv;
13397      long k[4];
13398
13399      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13400      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13401
13402      if (TARGET_64BIT)
13403	{
13404	  if (TARGET_MINIMAL_TOC)
13405	    fputs (DOUBLE_INT_ASM_OP, file);
13406	  else
13407	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13408		     k[0] & 0xffffffff, k[1] & 0xffffffff,
13409		     k[2] & 0xffffffff, k[3] & 0xffffffff);
13410	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13411		   k[0] & 0xffffffff, k[1] & 0xffffffff,
13412		   k[2] & 0xffffffff, k[3] & 0xffffffff);
13413	  return;
13414	}
13415      else
13416	{
13417	  if (TARGET_MINIMAL_TOC)
13418	    fputs ("\t.long ", file);
13419	  else
13420	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13421		     k[0] & 0xffffffff, k[1] & 0xffffffff,
13422		     k[2] & 0xffffffff, k[3] & 0xffffffff);
13423	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13424		   k[0] & 0xffffffff, k[1] & 0xffffffff,
13425		   k[2] & 0xffffffff, k[3] & 0xffffffff);
13426	  return;
13427	}
13428    }
13429  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13430    {
13431      REAL_VALUE_TYPE rv;
13432      long k[2];
13433
13434      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13435      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13436
13437      if (TARGET_64BIT)
13438	{
13439	  if (TARGET_MINIMAL_TOC)
13440	    fputs (DOUBLE_INT_ASM_OP, file);
13441	  else
13442	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13443		     k[0] & 0xffffffff, k[1] & 0xffffffff);
13444	  fprintf (file, "0x%lx%08lx\n",
13445		   k[0] & 0xffffffff, k[1] & 0xffffffff);
13446	  return;
13447	}
13448      else
13449	{
13450	  if (TARGET_MINIMAL_TOC)
13451	    fputs ("\t.long ", file);
13452	  else
13453	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13454		     k[0] & 0xffffffff, k[1] & 0xffffffff);
13455	  fprintf (file, "0x%lx,0x%lx\n",
13456		   k[0] & 0xffffffff, k[1] & 0xffffffff);
13457	  return;
13458	}
13459    }
13460  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13461    {
13462      REAL_VALUE_TYPE rv;
13463      long l;
13464
13465      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13466      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13467
13468      if (TARGET_64BIT)
13469	{
13470	  if (TARGET_MINIMAL_TOC)
13471	    fputs (DOUBLE_INT_ASM_OP, file);
13472	  else
13473	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13474	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13475	  return;
13476	}
13477      else
13478	{
13479	  if (TARGET_MINIMAL_TOC)
13480	    fputs ("\t.long ", file);
13481	  else
13482	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13483	  fprintf (file, "0x%lx\n", l & 0xffffffff);
13484	  return;
13485	}
13486    }
13487  else if (GET_MODE (x) == VOIDmode
13488	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13489    {
13490      unsigned HOST_WIDE_INT low;
13491      HOST_WIDE_INT high;
13492
13493      if (GET_CODE (x) == CONST_DOUBLE)
13494	{
13495	  low = CONST_DOUBLE_LOW (x);
13496	  high = CONST_DOUBLE_HIGH (x);
13497	}
13498      else
13499#if HOST_BITS_PER_WIDE_INT == 32
13500	{
13501	  low = INTVAL (x);
13502	  high = (low & 0x80000000) ? ~0 : 0;
13503	}
13504#else
13505	{
13506          low = INTVAL (x) & 0xffffffff;
13507          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13508	}
13509#endif
13510
13511      /* TOC entries are always Pmode-sized, but since this
13512	 is a bigendian machine then if we're putting smaller
13513	 integer constants in the TOC we have to pad them.
13514	 (This is still a win over putting the constants in
13515	 a separate constant pool, because then we'd have
13516	 to have both a TOC entry _and_ the actual constant.)
13517
13518	 For a 32-bit target, CONST_INT values are loaded and shifted
13519	 entirely within `low' and can be stored in one TOC entry.  */
13520
13521      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13522	abort ();/* It would be easy to make this work, but it doesn't now.  */
13523
13524      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13525	{
13526#if HOST_BITS_PER_WIDE_INT == 32
13527	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13528			 POINTER_SIZE, &low, &high, 0);
13529#else
13530	  low |= high << 32;
13531	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13532	  high = (HOST_WIDE_INT) low >> 32;
13533	  low &= 0xffffffff;
13534#endif
13535	}
13536
13537      if (TARGET_64BIT)
13538	{
13539	  if (TARGET_MINIMAL_TOC)
13540	    fputs (DOUBLE_INT_ASM_OP, file);
13541	  else
13542	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13543		     (long) high & 0xffffffff, (long) low & 0xffffffff);
13544	  fprintf (file, "0x%lx%08lx\n",
13545		   (long) high & 0xffffffff, (long) low & 0xffffffff);
13546	  return;
13547	}
13548      else
13549	{
13550	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13551	    {
13552	      if (TARGET_MINIMAL_TOC)
13553		fputs ("\t.long ", file);
13554	      else
13555		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13556			 (long) high & 0xffffffff, (long) low & 0xffffffff);
13557	      fprintf (file, "0x%lx,0x%lx\n",
13558		       (long) high & 0xffffffff, (long) low & 0xffffffff);
13559	    }
13560	  else
13561	    {
13562	      if (TARGET_MINIMAL_TOC)
13563		fputs ("\t.long ", file);
13564	      else
13565		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13566	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13567	    }
13568	  return;
13569	}
13570    }
13571
13572  if (GET_CODE (x) == CONST)
13573    {
13574      if (GET_CODE (XEXP (x, 0)) != PLUS)
13575	abort ();
13576
13577      base = XEXP (XEXP (x, 0), 0);
13578      offset = INTVAL (XEXP (XEXP (x, 0), 1));
13579    }
13580
13581  if (GET_CODE (base) == SYMBOL_REF)
13582    name = XSTR (base, 0);
13583  else if (GET_CODE (base) == LABEL_REF)
13584    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13585  else if (GET_CODE (base) == CODE_LABEL)
13586    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13587  else
13588    abort ();
13589
13590  real_name = (*targetm.strip_name_encoding) (name);
13591  if (TARGET_MINIMAL_TOC)
13592    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13593  else
13594    {
13595      fprintf (file, "\t.tc %s", real_name);
13596
13597      if (offset < 0)
13598	fprintf (file, ".N%d", - offset);
13599      else if (offset)
13600	fprintf (file, ".P%d", offset);
13601
13602      fputs ("[TC],", file);
13603    }
13604
13605  /* Currently C++ toc references to vtables can be emitted before it
13606     is decided whether the vtable is public or private.  If this is
13607     the case, then the linker will eventually complain that there is
13608     a TOC reference to an unknown section.  Thus, for vtables only,
13609     we emit the TOC reference to reference the symbol and not the
13610     section.  */
13611  if (VTABLE_NAME_P (name))
13612    {
13613      RS6000_OUTPUT_BASENAME (file, name);
13614      if (offset < 0)
13615	fprintf (file, "%d", offset);
13616      else if (offset > 0)
13617	fprintf (file, "+%d", offset);
13618    }
13619  else
13620    output_addr_const (file, x);
13621  putc ('\n', file);
13622}
13623
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  const char *open_string = "\t.byte \"";	/* prefix before a quoted run */
  const char *open_decimal = "\t.byte ";	/* prefix before a decimal byte */
  const char *pending_close = NULL;		/* text needed to end the line */
  int run_length = 0;				/* chars in current quoted run */
  int i;
  char ch;

  for (i = 0; i < n; i++)
    {
      ch = *p++;
      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable character: start (or continue) a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* A quote inside a quoted string is written as two quotes.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++run_length;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++run_length;

	  /* The assembler truncates very long strings, so break the
	     quoted run well before that can happen.  */
	  if (run_length >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      run_length = 0;
	    }
	}
      else
	{
	  /* Non-printable character: emit it as a decimal .byte operand.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  run_length = 0;
	}
    }

  /* Close any open quoted string, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
13690
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
		         const char *section_desc)
{
  const char *scan, *basename, *dot = 0;
  char *out;
  int size;

  /* Find the start of the base name and the last period in one pass.  */
  basename = filename;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	basename = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* Room for the leading '_', the name, the descriptor and a NUL.  */
  size = strlen (basename) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (size);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumerics, substituting SECTION_DESC at the last period.  */
  for (scan = basename; *scan; scan++)
    {
      if (scan == dot)
        {
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
        }

      else if (ISALNUM (*scan))
        *out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);		/* No period: append the desc.  */
  else
    *out = '\0';
}
13744
/* Emit profile function.  Generates the RTL for the mcount call on the
   AIX and Darwin ABIs; with -mprofile-kernel the call is emitted as
   assembly by output_function_profiler instead, so do nothing here.  */

void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  /* Pass the address of the per-function LP counter label
	     to mcount.  */
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
13797
/* Write function profiler code.  Emits the assembly that sets up and
   performs the mcount call at function entry; for the non-kernel
   AIX/Darwin case the call is generated in RTL by output_profile_hook
   instead and nothing is printed here.  */

void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];
  /* Stack offset from r1 at which the caller's LR is saved:
     8 by default, 4 for the V4 ABI.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Only 32-bit V4 profiling is implemented.  */
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the LP label's address from the GOT via r12.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");

      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling is 64-bit only here.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
13890
13891
static int
rs6000_use_dfa_pipeline_interface (void)
{
  /* Target hook: the rs6000 descriptions use the DFA scheduler.  */
  return 1;
}
13897
13898/* Power4 load update and store update instructions are cracked into a
13899   load or store and an integer insn which are executed in the same cycle.
13900   Branches have their own dispatch slot which does not count against the
13901   GCC issue rate, but it changes the program flow so there are no other
13902   instructions to issue in this cycle.  */
13903
13904static int
13905rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13906		       int verbose ATTRIBUTE_UNUSED,
13907		       rtx insn, int more)
13908{
13909  if (GET_CODE (PATTERN (insn)) == USE
13910      || GET_CODE (PATTERN (insn)) == CLOBBER)
13911    return more;
13912
13913  if (rs6000_sched_groups)
13914    {
13915      if (is_microcoded_insn (insn))
13916        return 0;
13917      else if (is_cracked_insn (insn))
13918        return more > 2 ? more - 2 : 0;
13919    }
13920
13921  return more - 1;
13922}
13923
13924/* Adjust the cost of a scheduling dependency.  Return the new cost of
13925   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
13926
13927static int
13928rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13929		    int cost)
13930{
13931  if (! recog_memoized (insn))
13932    return 0;
13933
13934  if (REG_NOTE_KIND (link) != 0)
13935    return 0;
13936
13937  if (REG_NOTE_KIND (link) == 0)
13938    {
13939      /* Data dependency; DEP_INSN writes a register that INSN reads
13940	 some cycles later.  */
13941      switch (get_attr_type (insn))
13942	{
13943	case TYPE_JMPREG:
13944	  /* Tell the first scheduling pass about the latency between
13945	     a mtctr and bctr (and mtlr and br/blr).  The first
13946	     scheduling pass will not know about this latency since
13947	     the mtctr instruction, which has the latency associated
13948	     to it, will be generated by reload.  */
13949	  return TARGET_POWER ? 5 : 4;
13950	case TYPE_BRANCH:
13951	  /* Leave some extra cycles between a compare and its
13952	     dependent branch, to inhibit expensive mispredicts.  */
13953	  if ((rs6000_cpu_attr == CPU_PPC603
13954	       || rs6000_cpu_attr == CPU_PPC604
13955	       || rs6000_cpu_attr == CPU_PPC604E
13956	       || rs6000_cpu_attr == CPU_PPC620
13957	       || rs6000_cpu_attr == CPU_PPC630
13958	       || rs6000_cpu_attr == CPU_PPC750
13959	       || rs6000_cpu_attr == CPU_PPC7400
13960	       || rs6000_cpu_attr == CPU_PPC7450
13961	       || rs6000_cpu_attr == CPU_POWER4
13962	       || rs6000_cpu_attr == CPU_POWER5)
13963	      && recog_memoized (dep_insn)
13964	      && (INSN_CODE (dep_insn) >= 0)
13965	      && (get_attr_type (dep_insn) == TYPE_CMP
13966		  || get_attr_type (dep_insn) == TYPE_COMPARE
13967		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13968		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13969		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13970		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13971		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13972		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13973	    return cost + 2;
13974	default:
13975	  break;
13976	}
13977      /* Fall out to return default cost.  */
13978    }
13979
13980  return cost;
13981}
13982
13983/* The function returns a true if INSN is microcoded.
13984   Return false otherwise.  */
13985
13986static bool
13987is_microcoded_insn (rtx insn)
13988{
13989  if (!insn || !INSN_P (insn)
13990      || GET_CODE (PATTERN (insn)) == USE
13991      || GET_CODE (PATTERN (insn)) == CLOBBER)
13992    return false;
13993
13994  if (rs6000_sched_groups)
13995    {
13996      enum attr_type type = get_attr_type (insn);
13997      if (type == TYPE_LOAD_EXT_U
13998	  || type == TYPE_LOAD_EXT_UX
13999	  || type == TYPE_LOAD_UX
14000	  || type == TYPE_STORE_UX
14001	  || type == TYPE_MFCR)
14002        return true;
14003    }
14004
14005  return false;
14006}
14007
14008/* The function returns a nonzero value if INSN can be scheduled only
14009   as the first insn in a dispatch group ("dispatch-slot restricted").
14010   In this case, the returned value indicates how many dispatch slots
14011   the insn occupies (at the beginning of the group).
14012   Return 0 otherwise.  */
14013
14014static int
14015is_dispatch_slot_restricted (rtx insn)
14016{
14017  enum attr_type type;
14018
14019  if (!rs6000_sched_groups)
14020    return 0;
14021
14022  if (!insn
14023      || insn == NULL_RTX
14024      || GET_CODE (insn) == NOTE
14025      || GET_CODE (PATTERN (insn)) == USE
14026      || GET_CODE (PATTERN (insn)) == CLOBBER)
14027    return 0;
14028
14029  type = get_attr_type (insn);
14030
14031  switch (type)
14032    {
14033    case TYPE_MFCR:
14034    case TYPE_MFCRF:
14035    case TYPE_MTCR:
14036    case TYPE_DELAYED_CR:
14037    case TYPE_CR_LOGICAL:
14038    case TYPE_MTJMPR:
14039    case TYPE_MFJMPR:
14040      return 1;
14041    case TYPE_IDIV:
14042    case TYPE_LDIV:
14043      return 2;
14044    default:
14045      if (rs6000_cpu == PROCESSOR_POWER5
14046	  && is_cracked_insn (insn))
14047	return 2;
14048      return 0;
14049    }
14050}
14051
14052/* The function returns true if INSN is cracked into 2 instructions
14053   by the processor (and therefore occupies 2 issue slots).  */
14054
14055static bool
14056is_cracked_insn (rtx insn)
14057{
14058  if (!insn || !INSN_P (insn)
14059      || GET_CODE (PATTERN (insn)) == USE
14060      || GET_CODE (PATTERN (insn)) == CLOBBER)
14061    return false;
14062
14063  if (rs6000_sched_groups)
14064    {
14065      enum attr_type type = get_attr_type (insn);
14066      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14067	       || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14068	       || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14069	       || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14070	       || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14071	       || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14072	       || type == TYPE_IDIV || type == TYPE_LDIV
14073	       || type == TYPE_INSERT_WORD)
14074        return true;
14075    }
14076
14077  return false;
14078}
14079
14080/* The function returns true if INSN can be issued only from
14081   the branch slot.  */
14082
14083static bool
14084is_branch_slot_insn (rtx insn)
14085{
14086  if (!insn || !INSN_P (insn)
14087      || GET_CODE (PATTERN (insn)) == USE
14088      || GET_CODE (PATTERN (insn)) == CLOBBER)
14089    return false;
14090
14091  if (rs6000_sched_groups)
14092    {
14093      enum attr_type type = get_attr_type (insn);
14094      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14095	return true;
14096      return false;
14097    }
14098
14099  return false;
14100}
14101
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Increase the priority to execute the
   INSN earlier, reduce the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* Disabled experiment, kept for reference: deprioritize
     multiplies/divides on the PPC750.  Not compiled.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* NOTE(review): INSN is marked ATTRIBUTE_UNUSED above but is in
     fact used right here; the attribute is stale (harmless).  */
  if (is_dispatch_slot_restricted (insn)
      && reload_completed
      && current_sched_info->sched_max_insns_priority
      && rs6000_sched_restricted_insns_priority)
    {

      /* Prioritize insns that can be dispatched only in the first dispatch slot.  */
      if (rs6000_sched_restricted_insns_priority == 1)
	/* Attach highest priority to insn. This means that in
	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
	   precede 'priority' (critical path) considerations.  */
	return current_sched_info->sched_max_insns_priority;
      else if (rs6000_sched_restricted_insns_priority == 2)
	/* Increase priority of insn by a minimal amount. This means that in
	   haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
	   precede dispatch-slot restriction considerations.  */
	return (priority + 1);
    }

  return priority;
}
14162
14163/* Return how many instructions the machine can issue per cycle.  */
14164
14165static int
14166rs6000_issue_rate (void)
14167{
14168  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
14169  if (!reload_completed)
14170    return 1;
14171
14172  switch (rs6000_cpu_attr) {
14173  case CPU_RIOS1:  /* ? */
14174  case CPU_RS64A:
14175  case CPU_PPC601: /* ? */
14176  case CPU_PPC7450:
14177    return 3;
14178  case CPU_PPC440:
14179  case CPU_PPC603:
14180  case CPU_PPC750:
14181  case CPU_PPC7400:
14182  case CPU_PPC8540:
14183    return 2;
14184  case CPU_RIOS2:
14185  case CPU_PPC604:
14186  case CPU_PPC604E:
14187  case CPU_PPC620:
14188  case CPU_PPC630:
14189    return 4;
14190  case CPU_POWER4:
14191  case CPU_POWER5:
14192    return 5;
14193  default:
14194    return 1;
14195  }
14196}
14197
14198/* Return how many instructions to look ahead for better insn
14199   scheduling.  */
14200
14201static int
14202rs6000_use_sched_lookahead (void)
14203{
14204  if (rs6000_cpu_attr == CPU_PPC8540)
14205    return 4;
14206  return 0;
14207}
14208
14209/* Determine is PAT refers to memory.  */
14210
14211static bool
14212is_mem_ref (rtx pat)
14213{
14214  const char * fmt;
14215  int i, j;
14216  bool ret = false;
14217
14218  if (GET_CODE (pat) == MEM)
14219    return true;
14220
14221  /* Recursively process the pattern.  */
14222  fmt = GET_RTX_FORMAT (GET_CODE (pat));
14223
14224  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14225    {
14226      if (fmt[i] == 'e')
14227	ret |= is_mem_ref (XEXP (pat, i));
14228      else if (fmt[i] == 'E')
14229	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14230	  ret |= is_mem_ref (XVECEXP (pat, i, j));
14231    }
14232
14233  return ret;
14234}
14235
14236/* Determine if PAT is a PATTERN of a load insn.  */
14237
14238static bool
14239is_load_insn1 (rtx pat)
14240{
14241  if (!pat || pat == NULL_RTX)
14242    return false;
14243
14244  if (GET_CODE (pat) == SET)
14245    return is_mem_ref (SET_SRC (pat));
14246
14247  if (GET_CODE (pat) == PARALLEL)
14248    {
14249      int i;
14250
14251      for (i = 0; i < XVECLEN (pat, 0); i++)
14252	if (is_load_insn1 (XVECEXP (pat, 0, i)))
14253	  return true;
14254    }
14255
14256  return false;
14257}
14258
14259/* Determine if INSN loads from memory.  */
14260
14261static bool
14262is_load_insn (rtx insn)
14263{
14264  if (!insn || !INSN_P (insn))
14265    return false;
14266
14267  if (GET_CODE (insn) == CALL_INSN)
14268    return false;
14269
14270  return is_load_insn1 (PATTERN (insn));
14271}
14272
14273/* Determine if PAT is a PATTERN of a store insn.  */
14274
14275static bool
14276is_store_insn1 (rtx pat)
14277{
14278  if (!pat || pat == NULL_RTX)
14279    return false;
14280
14281  if (GET_CODE (pat) == SET)
14282    return is_mem_ref (SET_DEST (pat));
14283
14284  if (GET_CODE (pat) == PARALLEL)
14285    {
14286      int i;
14287
14288      for (i = 0; i < XVECLEN (pat, 0); i++)
14289	if (is_store_insn1 (XVECEXP (pat, 0, i)))
14290	  return true;
14291    }
14292
14293  return false;
14294}
14295
14296/* Determine if INSN stores to memory.  */
14297
14298static bool
14299is_store_insn (rtx insn)
14300{
14301  if (!insn || !INSN_P (insn))
14302    return false;
14303
14304  return is_store_insn1 (PATTERN (insn));
14305}
14306
/* Returns whether the dependence between INSN and NEXT is considered
   costly by the given target.  LINK is the dependence link (may be
   null for the true_store_to_load case), COST its latency, and
   DISTANCE how far apart the insns already are.  */

static bool
rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
{
  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  /* REG_NOTE_KIND == 0 marks a true (read-after-write) dependence.  */
  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && (!link || (int) REG_NOTE_KIND (link) == 0))
     /* Prevent load after store in the same group if it is a true dependence.  */
     return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
14346
14347/* Return the next insn after INSN that is found before TAIL is reached,
14348   skipping any "non-active" insns - insns that will not actually occupy
14349   an issue slot.  Return NULL_RTX if such an insn is not found.  */
14350
14351static rtx
14352get_next_active_insn (rtx insn, rtx tail)
14353{
14354  rtx next_insn;
14355
14356  if (!insn || insn == tail)
14357    return NULL_RTX;
14358
14359  next_insn = NEXT_INSN (insn);
14360
14361  while (next_insn
14362  	 && next_insn != tail
14363	 && (GET_CODE(next_insn) == NOTE
14364	     || GET_CODE (PATTERN (next_insn)) == USE
14365	     || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14366    {
14367      next_insn = NEXT_INSN (next_insn);
14368    }
14369
14370  if (!next_insn || next_insn == tail)
14371    return NULL_RTX;
14372
14373  return next_insn;
14374}
14375
14376/* Return whether the presence of INSN causes a dispatch group termination
14377   of group WHICH_GROUP.
14378
14379   If WHICH_GROUP == current_group, this function will return true if INSN
14380   causes the termination of the current group (i.e, the dispatch group to
14381   which INSN belongs). This means that INSN will be the last insn in the
14382   group it belongs to.
14383
14384   If WHICH_GROUP == previous_group, this function will return true if INSN
14385   causes the termination of the previous group (i.e, the dispatch group that
14386   precedes the group to which INSN belongs).  This means that INSN will be
14387   the first insn in the group it belongs to).  */
14388
14389static bool
14390insn_terminates_group_p (rtx insn, enum group_termination which_group)
14391{
14392  enum attr_type type;
14393
14394  if (! insn)
14395    return false;
14396
14397  type = get_attr_type (insn);
14398
14399  if (is_microcoded_insn (insn))
14400    return true;
14401
14402  if (which_group == current_group)
14403    {
14404      if (is_branch_slot_insn (insn))
14405        return true;
14406      return false;
14407    }
14408  else if (which_group == previous_group)
14409    {
14410      if (is_dispatch_slot_restricted (insn))
14411        return true;
14412      return false;
14413    }
14414
14415  return false;
14416}
14417
14418/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14419   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */
14420
14421static bool
14422is_costly_group (rtx *group_insns, rtx next_insn)
14423{
14424  int i;
14425  rtx link;
14426  int cost;
14427  int issue_rate = rs6000_issue_rate ();
14428
14429  for (i = 0; i < issue_rate; i++)
14430    {
14431      rtx insn = group_insns[i];
14432      if (!insn)
14433        continue;
14434      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14435        {
14436          rtx next = XEXP (link, 0);
14437          if (next == next_insn)
14438            {
14439              cost = insn_cost (insn, link, next_insn);
14440              if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14441                return true;
14442            }
14443        }
14444    }
14445
14446  return false;
14447}
14448
14449/* Utility of the function redefine_groups.
14450   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14451   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
14452   to keep it "far" (in a separate group) from GROUP_INSNS, following
14453   one of the following schemes, depending on the value of the flag
14454   -minsert_sched_nops = X:
14455   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14456       in order to force NEXT_INSN into a separate group.
14457   (2) X < sched_finish_regroup_exact: insert exactly X nops.
14458   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14459   insertion (has a group just ended, how many vacant issue slots remain in the
14460   last group, and how many dispatch groups were encountered so far).  */
14461
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
		 bool *group_end, int can_issue_more, int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  /* Nothing to separate NEXT_INSN from if it doesn't exist.  */
  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact request no nop insertion.  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  /* Only separate NEXT_INSN if it has a costly dependence on an insn
     already placed in the current group.  */
  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
			*group_count ,can_issue_more);

  /* Scheme (1): insert exactly as many nops as needed so that NEXT_INSN
     starts a new dispatch group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
        can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch will be
	 forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
        can_issue_more--;

      while (can_issue_more > 0)
        {
          nop = gen_nop();
          emit_insn_before (nop, next_insn);
          can_issue_more--;
        }

      /* NEXT_INSN now begins a fresh group with all slots vacant.  */
      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert exactly rs6000_sched_insert_nops nops, keeping
     the group-boundary bookkeeping consistent as the nops fill slots.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
         issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
        {
          /* The current group just filled up: start a fresh one.  */
          can_issue_more = issue_rate - 1;
          (*group_count)++;
          end = true;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      while (n_nops > 0)
        {
          nop = gen_nop ();
          emit_insn_before (nop, next_insn);
          if (can_issue_more == issue_rate - 1) /* new group begins */
            end = false;
          can_issue_more--;
          if (can_issue_more == 0)
            {
              can_issue_more = issue_rate - 1;
              (*group_count)++;
              end = true;
              for (i = 0; i < issue_rate; i++)
                {
                  group_insns[i] = 0;
                }
            }
          n_nops--;
        }

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      *group_end = /* Is next_insn going to start a new group?  */
	  (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	      insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
        (*group_count)--;

      if (sched_verbose > 6)
        fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
			*group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
14570
14571/* This function tries to synch the dispatch groups that the compiler "sees"
14572   with the dispatch groups that the processor dispatcher is expected to
14573   form in practice.  It tries to achieve this synchronization by forcing the
14574   estimated processor grouping on the compiler (as opposed to the function
14575   'pad_goups' which tries to force the scheduler's grouping on the processor).
14576
14577   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14578   examines the (estimated) dispatch groups that will be formed by the processor
14579   dispatcher.  It marks these group boundaries to reflect the estimated
14580   processor grouping, overriding the grouping that the scheduler had marked.
14581   Depending on the value of the flag '-minsert-sched-nops' this function can
14582   force certain insns into separate groups or force a certain distance between
14583   them by inserting nops, for example, if there exists a "costly dependence"
14584   between the insns.
14585
14586   The function estimates the group boundaries that the processor will form as
14587   folllows:  It keeps track of how many vacant issue slots are available after
14588   each insn.  A subsequent insn will start a new group if one of the following
14589   4 cases applies:
14590   - no more vacant issue slots remain in the current dispatch group.
14591   - only the last issue slot, which is the branch slot, is vacant, but the next
14592     insn is not a branch.
14593   - only the last 2 or less issue slots, including the branch slot, are vacant,
14594     which means that a cracked insn (which occupies two issue slots) can't be
14595     issued in this group.
14596   - less than 'issue_rate' slots are vacant, and the next insn always needs to
14597     start a new group.  */
14598
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;		/* Insns placed in the current group, by slot.  */

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in its issue slot and account for the slots it uses.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
        rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
        can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
        return group_count + 1;

      group_end = /* Is next_insn going to start a new group?  */
        (can_issue_more == 0
         || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
         || (can_issue_more <= 2 && is_cracked_insn (next_insn))
         || (can_issue_more < issue_rate &&
             insn_terminates_group_p (next_insn, previous_group)));

      /* Optionally insert nops to separate NEXT_INSN from a costly
	 dependence on the current group; this may update group_end,
	 can_issue_more and group_count.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
			next_insn, &group_end, can_issue_more, &group_count);

      if (group_end)
        {
          group_count++;
          can_issue_more = 0;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      /* TImode on an insn marks it as starting a new dispatch group;
	 override the scheduler's marking to match our estimate.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
        PUT_MODE(next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
        PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
   } /* while */

  return group_count;
}
14667
14668/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14669   dispatch group boundaries that the scheduler had marked.  Pad with nops
14670   any dispatch groups which have vacant issue slots, in order to force the
14671   scheduler's grouping on the processor dispatcher.  The function
14672   returns the number of dispatch groups found.  */
14673
static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
      	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* An insn in TImode marks the start of a new dispatch group.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
        break;

      if (group_end)
        {
          /* If the scheduler had marked group termination at this location
             (between insn and next_insn), and neither insn nor next_insn will
             force group termination, pad the group with nops to force group
             termination.  */
          if (can_issue_more
              && (rs6000_sched_insert_nops == sched_finish_pad_groups)
              && !insn_terminates_group_p (insn, current_group)
              && !insn_terminates_group_p (next_insn, previous_group))
            {
              /* The branch slot can't hold a nop, so one fewer nop is
                 needed unless next_insn itself is a branch.  */
              if (!is_branch_slot_insn(next_insn))
                can_issue_more--;

              while (can_issue_more)
                {
                  nop = gen_nop ();
                  emit_insn_before (nop, next_insn);
                  can_issue_more--;
                }
            }

          can_issue_more = issue_rate;
          group_count++;
        }

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
14733
14734/* The following function is called at the end of scheduling BB.
14735   After reload, it inserts nops at insn group bundling.  */
14736
14737static void
14738rs6000_sched_finish (FILE *dump, int sched_verbose)
14739{
14740  int n_groups;
14741
14742  if (sched_verbose)
14743    fprintf (dump, "=== Finishing schedule.\n");
14744
14745  if (reload_completed && rs6000_sched_groups)
14746    {
14747      if (rs6000_sched_insert_nops == sched_finish_none)
14748        return;
14749
14750      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14751        n_groups = pad_groups (dump, sched_verbose,
14752				current_sched_info->prev_head,
14753  			   	current_sched_info->next_tail);
14754      else
14755        n_groups = redefine_groups (dump, sched_verbose,
14756				current_sched_info->prev_head,
14757  				current_sched_info->next_tail);
14758
14759      if (sched_verbose >= 6)
14760	{
14761    	  fprintf (dump, "ngroups = %d\n", n_groups);
14762	  print_rtl (dump, current_sched_info->prev_head);
14763	  fprintf (dump, "Done finish_sched\n");
14764	}
14765    }
14766}
14767
14768/* Length in units of the trampoline for entering a nested function.  */
14769
14770int
14771rs6000_trampoline_size (void)
14772{
14773  int ret = 0;
14774
14775  switch (DEFAULT_ABI)
14776    {
14777    default:
14778      abort ();
14779
14780    case ABI_AIX:
14781      ret = (TARGET_32BIT) ? 12 : 24;
14782      break;
14783
14784    case ABI_DARWIN:
14785    case ABI_V4:
14786      ret = (TARGET_32BIT) ? 40 : 48;
14787      break;
14788    }
14789
14790  return ret;
14791}
14792
14793/* Emit RTL insns to initialize the variable parts of a trampoline.
14794   FNADDR is an RTX for the address of the function's pure code.
14795   CXT is an RTX for the static chain value for the function.  */
14796
void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* Size of one pointer word.  */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the code and TOC words from FNADDR's descriptor, then
	   store the static chain as the third word.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
14841
14842
14843/* Table of valid machine attributes.  */
14844
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" and "shortcall" share one handler, which validates that
     the attribute is applied to a function; the NULL entry terminates
     the table.  */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
14852
14853/* Handle a "longcall" or "shortcall" attribute; arguments as in
14854   struct attribute_spec.handler.  */
14855
14856static tree
14857rs6000_handle_longcall_attribute (tree *node, tree name,
14858				  tree args ATTRIBUTE_UNUSED,
14859				  int flags ATTRIBUTE_UNUSED,
14860				  bool *no_add_attrs)
14861{
14862  if (TREE_CODE (*node) != FUNCTION_TYPE
14863      && TREE_CODE (*node) != FIELD_DECL
14864      && TREE_CODE (*node) != TYPE_DECL)
14865    {
14866      warning ("`%s' attribute only applies to functions",
14867	       IDENTIFIER_POINTER (name));
14868      *no_add_attrs = true;
14869    }
14870
14871  return NULL_TREE;
14872}
14873
14874/* Set longcall attributes on all functions declared when
14875   rs6000_default_long_calls is true.  */
14876static void
14877rs6000_set_default_type_attributes (tree type)
14878{
14879  if (rs6000_default_long_calls
14880      && (TREE_CODE (type) == FUNCTION_TYPE
14881	  || TREE_CODE (type) == METHOD_TYPE))
14882    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14883					NULL_TREE,
14884					TYPE_ATTRIBUTES (type));
14885}
14886
14887/* Return a reference suitable for calling a function with the
14888   longcall attribute.  */
14889
14890struct rtx_def *
14891rs6000_longcall_ref (rtx call_ref)
14892{
14893  const char *call_name;
14894  tree node;
14895
14896  if (GET_CODE (call_ref) != SYMBOL_REF)
14897    return call_ref;
14898
14899  /* System V adds '.' to the internal name, so skip them.  */
14900  call_name = XSTR (call_ref, 0);
14901  if (*call_name == '.')
14902    {
14903      while (*call_name == '.')
14904	call_name++;
14905
14906      node = get_identifier (call_name);
14907      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14908    }
14909
14910  return force_reg (Pmode, call_ref);
14911}
14912
14913#ifdef USING_ELFOS_H
14914
14915/* A C statement or statements to switch to the appropriate section
14916   for output of RTX in mode MODE.  You can assume that RTX is some
14917   kind of constant in RTL.  The argument MODE is redundant except in
14918   the case of a `const_int' rtx.  Select the section by calling
14919   `text_section' or one of the alternatives for other sections.
14920
14921   Do not define this macro if you put all constants in the read-only
14922   data section.  */
14923
14924static void
14925rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14926			       unsigned HOST_WIDE_INT align)
14927{
14928  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14929    toc_section ();
14930  else
14931    default_elf_select_rtx_section (mode, x, align);
14932}
14933
14934/* A C statement or statements to switch to the appropriate
14935   section for output of DECL.  DECL is either a `VAR_DECL' node
14936   or a constant of some sort.  RELOC indicates whether forming
14937   the initial value of DECL requires link-time relocations.  */
14938
14939static void
14940rs6000_elf_select_section (tree decl, int reloc,
14941			   unsigned HOST_WIDE_INT align)
14942{
14943  /* Pretend that we're always building for a shared library when
14944     ABI_AIX, because otherwise we end up with dynamic relocations
14945     in read-only sections.  This happens for function pointers,
14946     references to vtables in typeinfo, and probably other cases.  */
14947  default_elf_select_section_1 (decl, reloc, align,
14948				flag_pic || DEFAULT_ABI == ABI_AIX);
14949}
14950
14951/* A C statement to build up a unique section name, expressed as a
14952   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14953   RELOC indicates whether the initial value of EXP requires
14954   link-time relocations.  If you do not define this macro, GCC will use
14955   the symbol name prefixed by `.' as the section name.  Note - this
14956   macro can now be called for uninitialized data items as well as
14957   initialized data and functions.  */
14958
14959static void
14960rs6000_elf_unique_section (tree decl, int reloc)
14961{
14962  /* As above, pretend that we're always building for a shared library
14963     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
14964  default_unique_section_1 (decl, reloc,
14965			    flag_pic || DEFAULT_ABI == ABI_AIX);
14966}
14967
14968/* For a SYMBOL_REF, set generic flags and then perform some
14969   target-specific processing.
14970
14971   When the AIX ABI is requested on a non-AIX system, replace the
14972   function name with the real name (with a leading .) rather than the
14973   function descriptor name.  This saves a lot of overriding code to
14974   read the prefixes.  */
14975
14976static void
14977rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14978{
14979  default_encode_section_info (decl, rtl, first);
14980
14981  if (first
14982      && TREE_CODE (decl) == FUNCTION_DECL
14983      && !TARGET_AIX
14984      && DEFAULT_ABI == ABI_AIX)
14985    {
14986      rtx sym_ref = XEXP (rtl, 0);
14987      size_t len = strlen (XSTR (sym_ref, 0));
14988      char *str = alloca (len + 2);
14989      str[0] = '.';
14990      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14991      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14992    }
14993}
14994
14995static bool
14996rs6000_elf_in_small_data_p (tree decl)
14997{
14998  if (rs6000_sdata == SDATA_NONE)
14999    return false;
15000
15001  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15002    {
15003      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15004      if (strcmp (section, ".sdata") == 0
15005	  || strcmp (section, ".sdata2") == 0
15006	  || strcmp (section, ".sbss") == 0
15007	  || strcmp (section, ".sbss2") == 0
15008	  || strcmp (section, ".PPC.EMB.sdata0") == 0
15009	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
15010	return true;
15011    }
15012  else
15013    {
15014      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15015
15016      if (size > 0
15017	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
15018	  /* If it's not public, and we're not going to reference it there,
15019	     there's no need to put it in the small data section.  */
15020	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15021	return true;
15022    }
15023
15024  return false;
15025}
15026
15027#endif /* USING_ELFOS_H */
15028
15029
15030/* Return a REG that occurs in ADDR with coefficient 1.
15031   ADDR can be effectively incremented by incrementing REG.
15032
15033   r0 is special and we must not select it as an address
15034   register by this routine since our caller will try to
15035   increment the returned register via an "la" instruction.  */
15036
15037struct rtx_def *
15038find_addr_reg (rtx addr)
15039{
15040  while (GET_CODE (addr) == PLUS)
15041    {
15042      if (GET_CODE (XEXP (addr, 0)) == REG
15043	  && REGNO (XEXP (addr, 0)) != 0)
15044	addr = XEXP (addr, 0);
15045      else if (GET_CODE (XEXP (addr, 1)) == REG
15046	       && REGNO (XEXP (addr, 1)) != 0)
15047	addr = XEXP (addr, 1);
15048      else if (CONSTANT_P (XEXP (addr, 0)))
15049	addr = XEXP (addr, 1);
15050      else if (CONSTANT_P (XEXP (addr, 1)))
15051	addr = XEXP (addr, 0);
15052      else
15053	abort ();
15054    }
15055  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15056    return addr;
15057  abort ();
15058}
15059
/* Issue a fatal "bad address" error for insn/operand OP.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
15065
15066#if TARGET_MACHO
15067
15068#if 0
15069/* Returns 1 if OP is either a symbol reference or a sum of a symbol
15070   reference and a constant.  */
15071
15072int
15073symbolic_operand (rtx op)
15074{
15075  switch (GET_CODE (op))
15076    {
15077    case SYMBOL_REF:
15078    case LABEL_REF:
15079      return 1;
15080    case CONST:
15081      op = XEXP (op, 0);
15082      return (GET_CODE (op) == SYMBOL_REF ||
15083	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15084	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15085	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
15086    default:
15087      return 0;
15088    }
15089}
15090#endif
15091
15092#if TARGET_MACHO
15093
15094static tree branch_island_list = 0;
15095
15096/* Remember to generate a branch island for far calls to the given
15097   function.  */
15098
15099static void
15100add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15101{
15102  tree branch_island = build_tree_list (function_name, label_name);
15103  TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15104  TREE_CHAIN (branch_island) = branch_island_list;
15105  branch_island_list = branch_island;
15106}
15107
/* Accessors for a branch-island list node: the VALUE slot holds the
   island's label, the PURPOSE slot the target function's name, and the
   TREE_TYPE slot the source line number (as stored by
   add_compiler_branch_island).  */
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15112
15113/* Generate far-jump branch islands for everything on the
15114   branch_island_list.  Invoked immediately after the last instruction
15115   of the epilogue has been emitted; the branch-islands must be
15116   appended to, and contiguous with, the function body.  Mach-O stubs
15117   are generated in machopic_output_stub().  */
15118
static void
macho_branch_islands (void)
{
  /* NOTE(review): tmp_buf and name_buf are fixed 512-byte buffers built
     up with strcpy/strcat; a sufficiently long symbol name could
     overflow them — confirm an upstream limit on symbol length or
     convert to bounded formatting.  */
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	darwin_strip_name_encoding (
	  IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      /* Start the island with its label on a fresh line.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER(branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: compute the target's address relative to a
	     local label obtained via bcl/mflr, then jump through CTR.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute address and jump via CTR.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  /* All pending islands have been emitted; reset the list.  */
  branch_island_list = 0;
}
15192
15193/* NO_PREVIOUS_DEF checks in the link list whether the function name is
15194   already there or not.  */
15195
15196static int
15197no_previous_def (tree function_name)
15198{
15199  tree branch_island;
15200  for (branch_island = branch_island_list;
15201       branch_island;
15202       branch_island = TREE_CHAIN (branch_island))
15203    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15204      return 0;
15205  return 1;
15206}
15207
15208/* GET_PREV_LABEL gets the label name from the previous definition of
15209   the function.  */
15210
15211static tree
15212get_prev_label (tree function_name)
15213{
15214  tree branch_island;
15215  for (branch_island = branch_island_list;
15216       branch_island;
15217       branch_island = TREE_CHAIN (branch_island))
15218    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15219      return BRANCH_ISLAND_LABEL_NAME (branch_island);
15220  return 0;
15221}
15222
15223/* INSN is either a function call or a millicode call.  It may have an
15224   unconditional jump in its delay slot.
15225
15226   CALL_DEST is the routine we are calling.  */
15227
char *
output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
{
  static char buf[256];
  /* A direct call to a "longcall" function goes through a branch
     island ("jbsr"); everything else is a plain "bl".  */
  if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  /* No island yet for this function: generate a fresh label and
	     record it (with the line number of the nearest preceding
	     NOTE) for macho_branch_islands to emit later.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Skip a '*' prefix, if present, per ASM_OUTPUT_LABELREF.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
15267
15268#endif /* TARGET_MACHO */
15269
15270/* Generate PIC and indirect symbol stubs.  */
15271
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;		/* Counter for unique $spb labels.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);


  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  /* PIC stubs go in their own section.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: establish a local anchor label via bcl/mflr, address
	 the lazy pointer relative to it, and jump through CTR.  */
      label++;
      local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
   {
     /* Non-PIC stub: load the lazy pointer's absolute address.  */
     fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
     fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
     fprintf (file, "\tmtctr r12\n");
     fprintf (file, "\tbctr\n");
   }

  /* Emit the lazy pointer itself, initially bound to the dyld helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
15330
15331/* Legitimize PIC addresses.  If the address is already
15332   position-independent, we return ORIG.  Newly generated
15333   position-independent addresses go into a reg.  This is REG if non
15334   zero, otherwise we allocate register(s) as necessary.  */
15335
/* True iff X is a CONST_INT that fits in a signed 16-bit field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  /* Allocate a scratch register only when the caller did not supply
     one and new pseudos may still be created.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already in PIC form: (const (plus pic_offset_table ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Use a different reg for the intermediate value, as
	     it will be marked UNCHANGING.  */
	  rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);

	  /* Legitimize both halves of the (plus base offset).  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg_temp);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* A small literal offset can stay in the address.  A large
	     one must go into a register -- or, once reload forbids new
	     pseudos, be spilled to the constant pool and legitimized
	     from there.  */
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
15387
15388/* This is just a placeholder to make linking work without having to
15389   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
15390   ever needed for Darwin (not too likely!) this would have to get a
15391   real definition.  */
15392
void
toc_section (void)
{
  /* Deliberately empty; see the comment above this definition.  */
}
15397
15398#endif /* TARGET_MACHO */
15399
15400#if TARGET_ELF
15401static unsigned int
15402rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15403{
15404  return default_section_type_flags_1 (decl, name, reloc,
15405				       flag_pic || DEFAULT_ABI == ABI_AIX);
15406}
15407
15408/* Record an element in the table of global constructors.  SYMBOL is
15409   a SYMBOL_REF of the function to be called; PRIORITY is a number
15410   between 0 and MAX_INIT_PRIORITY.
15411
15412   This differs from default_named_section_asm_out_constructor in
15413   that we have special handling for -mrelocatable.  */
15414
15415static void
15416rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15417{
15418  const char *section = ".ctors";
15419  char buf[16];
15420
15421  if (priority != DEFAULT_INIT_PRIORITY)
15422    {
15423      sprintf (buf, ".ctors.%.5u",
15424               /* Invert the numbering so the linker puts us in the proper
15425                  order; constructors are run from right to left, and the
15426                  linker sorts in increasing order.  */
15427               MAX_INIT_PRIORITY - priority);
15428      section = buf;
15429    }
15430
15431  named_section_flags (section, SECTION_WRITE);
15432  assemble_align (POINTER_SIZE);
15433
15434  if (TARGET_RELOCATABLE)
15435    {
15436      fputs ("\t.long (", asm_out_file);
15437      output_addr_const (asm_out_file, symbol);
15438      fputs (")@fixup\n", asm_out_file);
15439    }
15440  else
15441    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15442}
15443
15444static void
15445rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15446{
15447  const char *section = ".dtors";
15448  char buf[16];
15449
15450  if (priority != DEFAULT_INIT_PRIORITY)
15451    {
15452      sprintf (buf, ".dtors.%.5u",
15453               /* Invert the numbering so the linker puts us in the proper
15454                  order; constructors are run from right to left, and the
15455                  linker sorts in increasing order.  */
15456               MAX_INIT_PRIORITY - priority);
15457      section = buf;
15458    }
15459
15460  named_section_flags (section, SECTION_WRITE);
15461  assemble_align (POINTER_SIZE);
15462
15463  if (TARGET_RELOCATABLE)
15464    {
15465      fputs ("\t.long (", asm_out_file);
15466      output_addr_const (asm_out_file, symbol);
15467      fputs (")@fixup\n", asm_out_file);
15468    }
15469  else
15470    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15471}
15472
/* Output the assembler prologue that declares function NAME for DECL
   into FILE, handling the 64-bit function-descriptor ABI, the
   -mrelocatable TOC fixup, and the AIX-ABI descriptor layout.  */
void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  if (TARGET_64BIT)
    {
      /* 64-bit ELF: emit the official function descriptor into .opd
	 (entry point, TOC base, static chain) and make the dot-symbol
	 the actual code label.  */
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      putc ('.', file);
      assemble_name (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
      assemble_name (file, name);
      fputs (",24\n\t.type\t.", file);
      assemble_name (file, name);
      fputs (",@function\n", file);
      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	{
	  fputs ("\t.globl\t.", file);
	  assemble_name (file, name);
	  putc ('\n', file);
	}
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      putc ('.', file);
      ASM_OUTPUT_LABEL (file, name);
      return;
    }

  /* -mrelocatable with a TOC in use: emit a word recording the
     distance from the function's load point to its TOC.  */
  if (TARGET_RELOCATABLE
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      /* The descriptor name is the function name minus any leading
	 dots.  */
      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test is redundant -- we are already
	 inside the DEFAULT_ABI == ABI_AIX block, so the .long 0 word
	 is always emitted here.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
15542#endif
15543
15544#if TARGET_XCOFF
15545static void
15546rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15547{
15548  fputs (GLOBAL_ASM_OP, stream);
15549  RS6000_OUTPUT_BASENAME (stream, name);
15550  putc ('\n', stream);
15551}
15552
15553static void
15554rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15555{
15556  int smclass;
15557  static const char * const suffix[3] = { "PR", "RO", "RW" };
15558
15559  if (flags & SECTION_CODE)
15560    smclass = 0;
15561  else if (flags & SECTION_WRITE)
15562    smclass = 2;
15563  else
15564    smclass = 1;
15565
15566  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15567	   (flags & SECTION_CODE) ? "." : "",
15568	   name, suffix[smclass], flags & SECTION_ENTSIZE);
15569}
15570
15571static void
15572rs6000_xcoff_select_section (tree decl, int reloc,
15573			    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15574{
15575  if (decl_readonly_section_1 (decl, reloc, 1))
15576    {
15577      if (TREE_PUBLIC (decl))
15578        read_only_data_section ();
15579      else
15580        read_only_private_data_section ();
15581    }
15582  else
15583    {
15584      if (TREE_PUBLIC (decl))
15585        data_section ();
15586      else
15587        private_data_section ();
15588    }
15589}
15590
15591static void
15592rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15593{
15594  const char *name;
15595
15596  /* Use select_section for private and uninitialized data.  */
15597  if (!TREE_PUBLIC (decl)
15598      || DECL_COMMON (decl)
15599      || DECL_INITIAL (decl) == NULL_TREE
15600      || DECL_INITIAL (decl) == error_mark_node
15601      || (flag_zero_initialized_in_bss
15602	  && initializer_zerop (DECL_INITIAL (decl))))
15603    return;
15604
15605  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15606  name = (*targetm.strip_name_encoding) (name);
15607  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15608}
15609
15610/* Select section for constant in constant pool.
15611
15612   On RS/6000, all constants are in the private read-only data area.
15613   However, if this is being placed in the TOC it must be output as a
15614   toc entry.  */
15615
15616static void
15617rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15618				unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15619{
15620  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15621    toc_section ();
15622  else
15623    read_only_private_data_section ();
15624}
15625
15626/* Remove any trailing [DS] or the like from the symbol name.  */
15627
/* Remove any trailing storage-mapping-class suffix such as "[DS]"
   from NAME, after skipping a leading '*'.  */
static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* Only strip when a "[XX]" suffix can actually be present: it is
     four characters long, so for a shorter name ending in ']' the
     expression LEN - 4 would wrap around (size_t is unsigned) and we
     would read name[-1] for an empty name.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  return name;
}
15640
15641/* Section attributes.  AIX is always PIC.  */
15642
static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  /* Last argument 1: AIX is always PIC (see comment above).  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): assumes DECL is non-null on this path -- confirm
       against callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* The log2 of the alignment rides in the SECTION_ENTSIZE bits; it is
     printed as the .csect alignment by rs6000_xcoff_asm_named_section.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
15660
15661/* Output at beginning of assembler file.
15662
15663   Initialize the section names for the RS/6000 at this point.
15664
15665   Specify filename, including full path, to assembler.
15666
15667   We want to go into the TOC section so at least one .toc will be emitted.
15668   Also, in order to output proper .bs/.es pairs, we need at least one static
15669   [RW] section emitted.
15670
15671   Finally, declare mcount when profiling to make the assembler happy.  */
15672
static void
rs6000_xcoff_file_start (void)
{
  /* Derive the per-file bss/data/rodata section names from the main
     input file name.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the TOC so at least one .toc is emitted, and (when
     debugging) the static [RW] section needed for .bs/.es pairs; see
     the comment above this function.  The order of these section
     switches is deliberate.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  /* Declare mcount when profiling to keep the assembler happy.  */
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
15694
15695/* Output at end of assembler file.
15696   On the RS/6000, referencing data should automatically pull in text.  */
15697
15698static void
15699rs6000_xcoff_file_end (void)
15700{
15701  text_section ();
15702  fputs ("_section_.text:\n", asm_out_file);
15703  data_section ();
15704  fputs (TARGET_32BIT
15705	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15706	 asm_out_file);
15707}
15708#endif /* TARGET_XCOFF */
15709
15710#if TARGET_MACHO
15711/* Cross-module name binding.  Darwin does not support overriding
15712   functions at dynamic-link time.  */
15713
15714static bool
15715rs6000_binds_local_p (tree decl)
15716{
15717  return default_binds_local_p_1 (decl, 0);
15718}
15719#endif
15720
15721/* Compute a (partial) cost for rtx X.  Return true if the complete
15722   cost has been computed, and false if subexpressions should be
15723   scanned.  In either case, *TOTAL contains the cost result.  */
15724
static bool
rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
		  int *total)
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* A constant addend that fits neither a signed 16-bit immediate
	 nor a shifted (low-half-zero) immediate needs two add
	 instructions; anything else is one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Likewise, a logical-op mask with bits set in both halves
	 takes two instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      /* Per-processor multiply latencies.  Small immediate operands
	 (-256..255) are cheaper on several cores, and DImode
	 multiplies cost more on the 64-bit cores.  */
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_PPC440:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (3)
		    : COSTS_N_INSNS (2));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by an exact power of two is cheap (shift plus
	 fixup); otherwise fall through to the full divide costs.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-processor divide latencies; DImode divides cost more on
	 the 64-bit cores.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC440:
	  *total = COSTS_N_INSNS (34);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)).  */
      *total = 5;
      return true;

    default:
      return false;
    }
}
15945
/* A C expression returning the cost of moving data from a register of class
   FROM to one of class TO.  */
15948
15949int
15950rs6000_register_move_cost (enum machine_mode mode,
15951			   enum reg_class from, enum reg_class to)
15952{
15953  /*  Moves from/to GENERAL_REGS.  */
15954  if (reg_classes_intersect_p (to, GENERAL_REGS)
15955      || reg_classes_intersect_p (from, GENERAL_REGS))
15956    {
15957      if (! reg_classes_intersect_p (to, GENERAL_REGS))
15958	from = to;
15959
15960      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15961	return (rs6000_memory_move_cost (mode, from, 0)
15962		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15963
15964/* It's more expensive to move CR_REGS than CR0_REGS because of the shift....  */
15965      else if (from == CR_REGS)
15966	return 4;
15967
15968      else
15969/* A move will cost one instruction per GPR moved.  */
15970	return 2 * HARD_REGNO_NREGS (0, mode);
15971    }
15972
15973/* Moving between two similar registers is just one instruction.  */
15974  else if (reg_classes_intersect_p (to, from))
15975    return mode == TFmode ? 4 : 2;
15976
15977/* Everything else has to go through GENERAL_REGS.  */
15978  else
15979    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15980	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15981}
15982
/* A C expression returning the cost of moving data of MODE from a register to
   or from memory.  */
15985
15986int
15987rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15988			 int in ATTRIBUTE_UNUSED)
15989{
15990  if (reg_classes_intersect_p (class, GENERAL_REGS))
15991    return 4 * HARD_REGNO_NREGS (0, mode);
15992  else if (reg_classes_intersect_p (class, FLOAT_REGS))
15993    return 4 * HARD_REGNO_NREGS (32, mode);
15994  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15995    return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15996  else
15997    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15998}
15999
16000/* Return an RTX representing where to find the function value of a
16001   function returning MODE.  */
static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  /* Complex float returns in FP registers when hard float is
     available; otherwise, and for complex integers, in GPRs.  */
  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
	return gen_rtx_REG (mode, regno);
    }

  /* Wide parts: a single REG in MODE describes the whole value.  */
  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  /* Otherwise describe the value as its two parts in consecutive
     registers, the second at byte offset INNER_BYTES.  */
  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
			  const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
			  GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
16030
16031/* Define how to find the value returned by a function.
16032   VALTYPE is the data type of the value (as a tree).
16033   If the precise function being called is known, FUNC is its FUNCTION_DECL;
16034   otherwise, FUNC is 0.
16035
16036   On the SPE, both FPs and vectors are returned in r3.
16037
16038   On RS/6000 an integer value is in r3 and a floating-point value is in
16039   fp1, unless -msoft-float.  */
16040
rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }

  /* Promote narrow integers and pointers to full word mode.  */
  if ((INTEGRAL_TYPE_P (valtype)
       && TYPE_PRECISION (valtype) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;
  else
    mode = TYPE_MODE (valtype);

  /* Choose the return register: fp1 for hard-float scalars, a
     PARALLEL for split complex values, an AltiVec register for
     vectors under the AltiVec ABI, r3 otherwise.  */
  if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
	   && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
16081
16082/* Define how to find the value returned by a library function
16083   assuming the value has mode MODE.  */
16084rtx
16085rs6000_libcall_value (enum machine_mode mode)
16086{
16087  unsigned int regno;
16088
16089  if (GET_MODE_CLASS (mode) == MODE_FLOAT
16090	   && TARGET_HARD_FLOAT && TARGET_FPRS)
16091    regno = FP_ARG_RETURN;
16092  else if (ALTIVEC_VECTOR_MODE (mode)
16093	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16094    regno = ALTIVEC_ARG_RETURN;
16095  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16096    return rs6000_complex_function_value (mode);
16097  else
16098    regno = GP_ARG_RETURN;
16099
16100  return gen_rtx_REG (mode, regno);
16101}
16102
16103/* Define the offset between two registers, FROM to be eliminated and its
16104   replacement TO, at the start of a routine.  */
16105HOST_WIDE_INT
16106rs6000_initial_elimination_offset (int from, int to)
16107{
16108  rs6000_stack_t *info = rs6000_stack_info ();
16109  HOST_WIDE_INT offset;
16110
16111  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16112    offset = info->push_p ? 0 : -info->total_size;
16113  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16114    offset = info->total_size;
16115  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16116    offset = info->push_p ? info->total_size : 0;
16117  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16118    offset = 0;
16119  else
16120    abort ();
16121
16122  return offset;
16123}
16124
16125/* Return true if TYPE is of type __ev64_opaque__.  */
16126
16127static bool
16128is_ev64_opaque_type (tree type)
16129{
16130  return (TARGET_SPE
16131	  && (type == opaque_V2SI_type_node
16132	      || type == opaque_V2SF_type_node
16133	      || type == opaque_p_V2SI_type_node));
16134}
16135
16136static rtx
16137rs6000_dwarf_register_span (rtx reg)
16138{
16139  unsigned regno;
16140
16141  if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16142    return NULL_RTX;
16143
16144  regno = REGNO (reg);
16145
16146  /* The duality of the SPE register size wreaks all kinds of havoc.
16147     This is a way of distinguishing r0 in 32-bits from r0 in
16148     64-bits.  */
16149  return
16150    gen_rtx_PARALLEL (VOIDmode,
16151		      BYTES_BIG_ENDIAN
16152		      ? gen_rtvec (2,
16153				   gen_rtx_REG (SImode, regno + 1200),
16154				   gen_rtx_REG (SImode, regno))
16155		      : gen_rtvec (2,
16156				   gen_rtx_REG (SImode, regno),
16157				   gen_rtx_REG (SImode, regno + 1200)));
16158}
16159
16160/* Map internal gcc register numbers to DWARF2 register numbers.  */
16161
unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  /* GPRs and FPRs (0..63) keep their hardware numbers; remapping is
     only performed for DWARF 2 debug info.  */
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  /* CR0..CR7 map to 86..93.  */
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  /* AltiVec v0..v31 map to 1124..1155.  */
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  if (regno >= 1200 && regno < 1232)
    return regno;

  abort ();
}
16194
16195#include "gt-rs6000.h"
16196