/* rs6000.c revision 146895 */
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6   This file is part of GCC.
7
8   GCC is free software; you can redistribute it and/or modify it
9   under the terms of the GNU General Public License as published
10   by the Free Software Foundation; either version 2, or (at your
11   option) any later version.
12
13   GCC is distributed in the hope that it will be useful, but WITHOUT
14   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16   License for more details.
17
18   You should have received a copy of the GNU General Public License
19   along with GCC; see the file COPYING.  If not, write to the
20   Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21   MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
36#include "obstack.h"
37#include "tree.h"
38#include "expr.h"
39#include "optabs.h"
40#include "except.h"
41#include "function.h"
42#include "output.h"
43#include "basic-block.h"
44#include "integrate.h"
45#include "toplev.h"
46#include "ggc.h"
47#include "hashtab.h"
48#include "tm_p.h"
49#include "target.h"
50#include "target-def.h"
51#include "langhooks.h"
52#include "reload.h"
53#include "cfglayout.h"
54#include "sched-int.h"
55#if TARGET_XCOFF
56#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
57#endif
58
59#ifndef TARGET_NO_PROTOTYPE
60#define TARGET_NO_PROTOTYPE 0
61#endif
62
/* Nonzero if N fits in a 5-bit signed immediate ([-16, 15]) -- the
   easy-vector-constant range (presumably the AltiVec splat-immediate
   range; confirm against easy_vector_constant).  */
#define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
/* Nonzero if N is even and N/2 is in the EASY_VECTOR_15 range, so N
   can be formed by splatting N/2 and adding the result to itself.  */
#define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
                                          && !((n) & 1))

/* NOTE: both arguments are evaluated more than once -- do not pass
   expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
69
70/* Structure used to define the rs6000 stack */
71typedef struct rs6000_stack {
72  int first_gp_reg_save;	/* first callee saved GP register used */
73  int first_fp_reg_save;	/* first callee saved FP register used */
74  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
75  int lr_save_p;		/* true if the link reg needs to be saved */
76  int cr_save_p;		/* true if the CR reg needs to be saved */
77  unsigned int vrsave_mask;	/* mask of vec registers to save */
78  int toc_save_p;		/* true if the TOC needs to be saved */
79  int push_p;			/* true if we need to allocate stack space */
80  int calls_p;			/* true if the function makes any calls */
81  enum rs6000_abi abi;		/* which ABI to use */
82  int gp_save_offset;		/* offset to save GP regs from initial SP */
83  int fp_save_offset;		/* offset to save FP regs from initial SP */
84  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
85  int lr_save_offset;		/* offset to save LR from initial SP */
86  int cr_save_offset;		/* offset to save CR from initial SP */
87  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
88  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
89  int toc_save_offset;		/* offset to save the TOC pointer */
90  int varargs_save_offset;	/* offset to save the varargs registers */
91  int ehrd_offset;		/* offset to EH return data */
92  int reg_size;			/* register size (4 or 8) */
93  int varargs_size;		/* size to hold V.4 args passed in regs */
94  HOST_WIDE_INT vars_size;	/* variable save area size */
95  int parm_size;		/* outgoing parameter size */
96  int save_size;		/* save area size */
97  int fixed_size;		/* fixed size of stack frame */
98  int gp_size;			/* size of saved GP registers */
99  int fp_size;			/* size of saved FP registers */
100  int altivec_size;		/* size of saved AltiVec registers */
101  int cr_size;			/* size to hold CR if not in save_size */
102  int lr_size;			/* size to hold LR if not in save_size */
103  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
104  int altivec_padding_size;	/* size of altivec alignment padding if
105				   not in save_size */
106  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
107  int spe_padding_size;
108  int toc_size;			/* size to hold TOC if not in save_size */
109  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
110  int spe_64bit_regs_used;
111} rs6000_stack_t;
112
/* Target cpu type */

enum processor_type rs6000_cpu;

/* Sources of a processor selection, in increasing priority order.
   `string' is filled in at run time (entry 0 from the configure-time
   default cpu, the others from the command line); set_tune_p and
   set_arch_p say whether a match sets the scheduling model and/or
   the architecture flags.  Consumed by rs6000_override_options.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
123
124/* Always emit branch hint bits.  */
125static GTY(()) bool rs6000_always_hint;
126
127/* Schedule instructions for group formation.  */
128static GTY(()) bool rs6000_sched_groups;
129
130/* Support adjust_priority scheduler hook
131   and -mprioritize-restricted-insns= option.  */
132const char *rs6000_sched_restricted_insns_priority_str;
133int rs6000_sched_restricted_insns_priority;
134
135/* Support for -msched-costly-dep option.  */
136const char *rs6000_sched_costly_dep_str;
137enum rs6000_dependence_cost rs6000_sched_costly_dep;
138
139/* Support for -minsert-sched-nops option.  */
140const char *rs6000_sched_insert_nops_str;
141enum rs6000_nop_insertion rs6000_sched_insert_nops;
142
143/* Size of long double */
144const char *rs6000_long_double_size_string;
145int rs6000_long_double_type_size;
146
147/* Whether -mabi=altivec has appeared */
148int rs6000_altivec_abi;
149
150/* Whether VRSAVE instructions should be generated.  */
151int rs6000_altivec_vrsave;
152
153/* String from -mvrsave= option.  */
154const char *rs6000_altivec_vrsave_string;
155
156/* Nonzero if we want SPE ABI extensions.  */
157int rs6000_spe_abi;
158
159/* Whether isel instructions should be generated.  */
160int rs6000_isel;
161
162/* Whether SPE simd instructions should be generated.  */
163int rs6000_spe;
164
165/* Nonzero if floating point operations are done in the GPRs.  */
166int rs6000_float_gprs = 0;
167
168/* String from -mfloat-gprs=.  */
169const char *rs6000_float_gprs_string;
170
171/* String from -misel=.  */
172const char *rs6000_isel_string;
173
174/* String from -mspe=.  */
175const char *rs6000_spe_string;
176
177/* Set to nonzero once AIX common-mode calls have been defined.  */
178static GTY(()) int common_mode_defined;
179
180/* Save information from a "cmpxx" operation until the branch or scc is
181   emitted.  */
182rtx rs6000_compare_op0, rs6000_compare_op1;
183int rs6000_compare_fp_p;
184
185/* Label number of label created for -mrelocatable, to call to so we can
186   get the address of the GOT section */
187int rs6000_pic_labelno;
188
189#ifdef USING_ELFOS_H
190/* Which abi to adhere to */
191const char *rs6000_abi_name;
192
193/* Semantics of the small data area */
194enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195
196/* Which small data model to use */
197const char *rs6000_sdata_name = (char *)0;
198
199/* Counter for labels which are to be placed in .fixup.  */
200int fixuplabelno = 0;
201#endif
202
203/* Bit size of immediate TLS offsets and string from which it is decoded.  */
204int rs6000_tls_size = 32;
205const char *rs6000_tls_size_string;
206
207/* ABI enumeration available for subtarget to use.  */
208enum rs6000_abi rs6000_current_abi;
209
210/* ABI string from -mabi= option.  */
211const char *rs6000_abi_string;
212
213/* Debug flags */
214const char *rs6000_debug_name;
215int rs6000_debug_stack;		/* debug stack applications */
216int rs6000_debug_arg;		/* debug argument handling */
217
218/* Opaque types.  */
219static GTY(()) tree opaque_V2SI_type_node;
220static GTY(()) tree opaque_V2SF_type_node;
221static GTY(()) tree opaque_p_V2SI_type_node;
222
223/* AltiVec requires a few more basic types in addition to the vector
224   types already defined in tree.c.  */
225static GTY(()) tree bool_char_type_node;	/* __bool char */
226static GTY(()) tree bool_short_type_node;	/* __bool short */
227static GTY(()) tree bool_int_type_node;		/* __bool int */
228static GTY(()) tree pixel_type_node;		/* __pixel */
229static GTY(()) tree bool_V16QI_type_node;	/* __vector __bool char */
230static GTY(()) tree bool_V8HI_type_node;	/* __vector __bool short */
231static GTY(()) tree bool_V4SI_type_node;	/* __vector __bool int */
232static GTY(()) tree pixel_V8HI_type_node;	/* __vector __pixel */
233
234int rs6000_warn_altivec_long = 1;		/* On by default. */
235const char *rs6000_warn_altivec_long_switch;
236
237const char *rs6000_traceback_name;
238static enum {
239  traceback_default = 0,
240  traceback_none,
241  traceback_part,
242  traceback_full
243} rs6000_traceback;
244
245/* Flag to say the TOC is initialized */
246int toc_initialized;
247char toc_label_name[10];
248
249/* Alias set for saves and restores from the rs6000 stack.  */
250static GTY(()) int rs6000_sr_alias_set;
251
252/* Call distance, overridden by -mlongcall and #pragma longcall(1).
253   The only place that looks at this is rs6000_set_default_type_attributes;
254   everywhere else should rely on the presence or absence of a longcall
255   attribute on the function declaration.  Exception: init_cumulative_args
256   looks at it too, for libcalls.  */
257int rs6000_default_long_calls;
258const char *rs6000_longcall_switch;
259
260/* Control alignment for fields within structures.  */
261/* String from -malign-XXXXX.  */
262const char *rs6000_alignment_string;
263int rs6000_alignment_flags;
264
/* Describes one target builtin: the availability mask, the insn that
   implements it, its user-visible name, and its builtin code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;		/* target-flag bits gating availability */
  const enum insn_code icode;	/* insn implementing the builtin */
  const char *const name;	/* user-visible name of the builtin */
  const enum rs6000_builtins code;	/* builtin function code */
};
275
276static bool rs6000_function_ok_for_sibcall (tree, tree);
277static int num_insns_constant_wide (HOST_WIDE_INT);
278static void validate_condition_mode (enum rtx_code, enum machine_mode);
279static rtx rs6000_generate_compare (enum rtx_code);
280static void rs6000_maybe_dead (rtx);
281static void rs6000_emit_stack_tie (void);
282static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
283static rtx spe_synthesize_frame_save (rtx);
284static bool spe_func_has_64bit_regs_p (void);
285static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
286			     int, HOST_WIDE_INT);
287static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
288static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
289static unsigned rs6000_hash_constant (rtx);
290static unsigned toc_hash_function (const void *);
291static int toc_hash_eq (const void *, const void *);
292static int constant_pool_expr_1 (rtx, int *, int *);
293static bool constant_pool_expr_p (rtx);
294static bool toc_relative_expr_p (rtx);
295static bool legitimate_small_data_p (enum machine_mode, rtx);
296static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
297static bool legitimate_indexed_address_p (rtx, int);
298static bool legitimate_indirect_address_p (rtx, int);
299static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
300static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
301static struct machine_function * rs6000_init_machine_status (void);
302static bool rs6000_assemble_integer (rtx, unsigned int, int);
303#ifdef HAVE_GAS_HIDDEN
304static void rs6000_assemble_visibility (tree, int);
305#endif
306static int rs6000_ra_ever_killed (void);
307static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
308static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
309static const char *rs6000_mangle_fundamental_type (tree);
310extern const struct attribute_spec rs6000_attribute_table[];
311static void rs6000_set_default_type_attributes (tree);
312static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
313static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
314static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
315				    tree);
316static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
317static bool rs6000_return_in_memory (tree, tree);
318static void rs6000_file_start (void);
319#if TARGET_ELF
320static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
321static void rs6000_elf_asm_out_constructor (rtx, int);
322static void rs6000_elf_asm_out_destructor (rtx, int);
323static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
324static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
325static void rs6000_elf_unique_section (tree, int);
326static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
327					   unsigned HOST_WIDE_INT);
328static void rs6000_elf_encode_section_info (tree, rtx, int)
329     ATTRIBUTE_UNUSED;
330static bool rs6000_elf_in_small_data_p (tree);
331#endif
332#if TARGET_XCOFF
333static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
334static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
335static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
336static void rs6000_xcoff_unique_section (tree, int);
337static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
338					     unsigned HOST_WIDE_INT);
339static const char * rs6000_xcoff_strip_name_encoding (const char *);
340static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
341static void rs6000_xcoff_file_start (void);
342static void rs6000_xcoff_file_end (void);
343#endif
344#if TARGET_MACHO
345static bool rs6000_binds_local_p (tree);
346#endif
347static int rs6000_use_dfa_pipeline_interface (void);
348static int rs6000_variable_issue (FILE *, int, rtx, int);
349static bool rs6000_rtx_costs (rtx, int, int, int *);
350static int rs6000_adjust_cost (rtx, rtx, rtx, int);
351static bool is_microcoded_insn (rtx);
352static int is_dispatch_slot_restricted (rtx);
353static bool is_cracked_insn (rtx);
354static bool is_branch_slot_insn (rtx);
355static int rs6000_adjust_priority (rtx, int);
356static int rs6000_issue_rate (void);
357static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
358static rtx get_next_active_insn (rtx, rtx);
359static bool insn_terminates_group_p (rtx , enum group_termination);
360static bool is_costly_group (rtx *, rtx);
361static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
362static int redefine_groups (FILE *, int, rtx, rtx);
363static int pad_groups (FILE *, int, rtx, rtx);
364static void rs6000_sched_finish (FILE *, int);
365static int rs6000_use_sched_lookahead (void);
366
367static void rs6000_init_builtins (void);
368static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
369static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
370static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
371static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
372static void altivec_init_builtins (void);
373static void rs6000_common_init_builtins (void);
374static void rs6000_init_libfuncs (void);
375
376static void enable_mask_for_builtins (struct builtin_description *, int,
377				      enum rs6000_builtins,
378				      enum rs6000_builtins);
379static void spe_init_builtins (void);
380static rtx spe_expand_builtin (tree, rtx, bool *);
381static rtx spe_expand_stv_builtin (enum insn_code, tree);
382static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
383static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
384static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
385static rs6000_stack_t *rs6000_stack_info (void);
386static void debug_stack_info (rs6000_stack_t *);
387
388static rtx altivec_expand_builtin (tree, rtx, bool *);
389static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
390static rtx altivec_expand_st_builtin (tree, rtx, bool *);
391static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
392static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
393static rtx altivec_expand_predicate_builtin (enum insn_code,
394					    const char *, tree, rtx);
395static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
396static rtx altivec_expand_stv_builtin (enum insn_code, tree);
397static void rs6000_parse_abi_options (void);
398static void rs6000_parse_alignment_option (void);
399static void rs6000_parse_tls_size_option (void);
400static void rs6000_parse_yes_no_option (const char *, const char *, int *);
401static int first_altivec_reg_to_save (void);
402static unsigned int compute_vrsave_mask (void);
403static void is_altivec_return_reg (rtx, void *);
404static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
405int easy_vector_constant (rtx, enum machine_mode);
406static int easy_vector_same (rtx, enum machine_mode);
407static int easy_vector_splat_const (int, enum machine_mode);
408static bool is_ev64_opaque_type (tree);
409static rtx rs6000_dwarf_register_span (rtx);
410static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
411static rtx rs6000_tls_get_addr (void);
412static rtx rs6000_got_sym (void);
413static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
414static const char *rs6000_get_some_local_dynamic_name (void);
415static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
416static rtx rs6000_complex_function_value (enum machine_mode);
417static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
418				    enum machine_mode, tree);
419static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
420static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
421static void setup_incoming_varargs (CUMULATIVE_ARGS *,
422				    enum machine_mode, tree,
423				    int *, int);
424#if TARGET_MACHO
425static void macho_branch_islands (void);
426static void add_compiler_branch_island (tree, tree, int);
427static int no_previous_def (tree function_name);
428static tree get_prev_label (tree function_name);
429#endif
430
431static tree rs6000_build_builtin_va_list (void);
432
/* Hash table stuff for keeping track of TOC entries.  */

/* One entry in the TOC hash table: the constant, its mode, and the
   number of the label emitted for it.  */
struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;	/* machine mode of `key' */
  int labelno;			/* number of the label for this entry */
};

/* GC-managed hash table of the TOC entries emitted so far (hashed by
   toc_hash_function, compared by toc_hash_eq).  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
445
446/* Default register names.  */
447char rs6000_reg_names[][8] =
448{
449      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
450      "8",  "9", "10", "11", "12", "13", "14", "15",
451     "16", "17", "18", "19", "20", "21", "22", "23",
452     "24", "25", "26", "27", "28", "29", "30", "31",
453      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
454      "8",  "9", "10", "11", "12", "13", "14", "15",
455     "16", "17", "18", "19", "20", "21", "22", "23",
456     "24", "25", "26", "27", "28", "29", "30", "31",
457     "mq", "lr", "ctr","ap",
458      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
459      "xer",
460      /* AltiVec registers.  */
461      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
462      "8",  "9",  "10", "11", "12", "13", "14", "15",
463      "16", "17", "18", "19", "20", "21", "22", "23",
464      "24", "25", "26", "27", "28", "29", "30", "31",
465      "vrsave", "vscr",
466      /* SPE registers.  */
467      "spe_acc", "spefscr"
468};
469
470#ifdef TARGET_REGNAMES
471static const char alt_reg_names[][8] =
472{
473   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
474   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
475  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
476  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
477   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
478   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
479  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
480  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
481    "mq",    "lr",  "ctr",   "ap",
482  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
483   "xer",
484  /* AltiVec registers.  */
485   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
486   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
487  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
488  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
489  "vrsave", "vscr",
490  /* SPE registers.  */
491  "spe_acc", "spefscr"
492};
493#endif
494
495#ifndef MASK_STRICT_ALIGN
496#define MASK_STRICT_ALIGN 0
497#endif
498#ifndef TARGET_PROFILE_KERNEL
499#define TARGET_PROFILE_KERNEL 0
500#endif
501
502/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
503#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
504
505/* Return 1 for a symbol ref for a thread-local storage symbol.  */
506#define RS6000_SYMBOL_REF_TLS_P(RTX) \
507  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
508
509/* Initialize the GCC target structure.  */
510#undef TARGET_ATTRIBUTE_TABLE
511#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
512#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
513#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
514
515#undef TARGET_ASM_ALIGNED_DI_OP
516#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
517
518/* Default unaligned ops are only provided for ELF.  Find the ops needed
519   for non-ELF systems.  */
520#ifndef OBJECT_FORMAT_ELF
521#if TARGET_XCOFF
522/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
523   64-bit targets.  */
524#undef TARGET_ASM_UNALIGNED_HI_OP
525#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
526#undef TARGET_ASM_UNALIGNED_SI_OP
527#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
528#undef TARGET_ASM_UNALIGNED_DI_OP
529#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
530#else
531/* For Darwin.  */
532#undef TARGET_ASM_UNALIGNED_HI_OP
533#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
534#undef TARGET_ASM_UNALIGNED_SI_OP
535#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
536#endif
537#endif
538
539/* This hook deals with fixups for relocatable code and DI-mode objects
540   in 64-bit code.  */
541#undef TARGET_ASM_INTEGER
542#define TARGET_ASM_INTEGER rs6000_assemble_integer
543
544#ifdef HAVE_GAS_HIDDEN
545#undef TARGET_ASM_ASSEMBLE_VISIBILITY
546#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
547#endif
548
549#undef TARGET_HAVE_TLS
550#define TARGET_HAVE_TLS HAVE_AS_TLS
551
552#undef TARGET_CANNOT_FORCE_CONST_MEM
553#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
554
555#undef TARGET_ASM_FUNCTION_PROLOGUE
556#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
557#undef TARGET_ASM_FUNCTION_EPILOGUE
558#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
559
560#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
561#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
562#undef  TARGET_SCHED_VARIABLE_ISSUE
563#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
564
565#undef TARGET_SCHED_ISSUE_RATE
566#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
567#undef TARGET_SCHED_ADJUST_COST
568#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
569#undef TARGET_SCHED_ADJUST_PRIORITY
570#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
571#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
572#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
573#undef TARGET_SCHED_FINISH
574#define TARGET_SCHED_FINISH rs6000_sched_finish
575
576#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
577#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
578
579#undef TARGET_INIT_BUILTINS
580#define TARGET_INIT_BUILTINS rs6000_init_builtins
581
582#undef TARGET_EXPAND_BUILTIN
583#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
584
585#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
586#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
587
588#undef TARGET_INIT_LIBFUNCS
589#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
590
591#if TARGET_MACHO
592#undef TARGET_BINDS_LOCAL_P
593#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
594#endif
595
596#undef TARGET_ASM_OUTPUT_MI_THUNK
597#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
598
599#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
600#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
601
602#undef TARGET_FUNCTION_OK_FOR_SIBCALL
603#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
604
605#undef TARGET_RTX_COSTS
606#define TARGET_RTX_COSTS rs6000_rtx_costs
607#undef TARGET_ADDRESS_COST
608#define TARGET_ADDRESS_COST hook_int_rtx_0
609
610#undef TARGET_VECTOR_OPAQUE_P
611#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
612
613#undef TARGET_DWARF_REGISTER_SPAN
614#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
615
616/* On rs6000, function arguments are promoted, as are function return
617   values.  */
618#undef TARGET_PROMOTE_FUNCTION_ARGS
619#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
620#undef TARGET_PROMOTE_FUNCTION_RETURN
621#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
622
623/* Structure return values are passed as an extra parameter.  */
624#undef TARGET_STRUCT_VALUE_RTX
625#define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
626
627#undef TARGET_RETURN_IN_MEMORY
628#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
629
630#undef TARGET_SETUP_INCOMING_VARARGS
631#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
632
633/* Always strict argument naming on rs6000.  */
634#undef TARGET_STRICT_ARGUMENT_NAMING
635#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
636#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
637#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
638#undef TARGET_SPLIT_COMPLEX_ARG
639#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
640
641#undef TARGET_BUILD_BUILTIN_VA_LIST
642#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
643
644struct gcc_target targetm = TARGET_INITIALIZER;
645
646/* Override command line options.  Mostly we process the processor
647   type and sometimes adjust other TARGET_ options.  */
648
649void
650rs6000_override_options (const char *default_cpu)
651{
652  size_t i, j;
653  struct rs6000_cpu_select *ptr;
654  int set_masks;
655
656  /* Simplifications for entries below.  */
657
658  enum {
659    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
660    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
661  };
662
663  /* This table occasionally claims that a processor does not support
664     a particular feature even though it does, but the feature is slower
665     than the alternative.  Thus, it shouldn't be relied on as a
666     complete description of the processor's support.
667
668     Please keep this list in order, and don't forget to update the
669     documentation in invoke.texi when adding a new processor or
670     flag.  */
671  static struct ptt
672    {
673      const char *const name;		/* Canonical processor name.  */
674      const enum processor_type processor; /* Processor type enum value.  */
675      const int target_enable;	/* Target flags to enable.  */
676    } const processor_target_table[]
677      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678	 {"403", PROCESSOR_PPC403,
679	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
680	 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
681	 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
682	 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
683	 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
684	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
685	 {"601", PROCESSOR_PPC601,
686	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
687	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
688	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
689	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
691	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
692	 {"620", PROCESSOR_PPC620,
693	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
694	 {"630", PROCESSOR_PPC630,
695	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
696	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
697	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
698	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
699	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
700	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
701	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
702	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
703	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
704	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
705	 {"970", PROCESSOR_POWER4,
706	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
707	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
708	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
709	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
710	 {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
711	 {"G5", PROCESSOR_POWER4,
712	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
713	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
714	 {"power2", PROCESSOR_POWER,
715	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
716	 {"power3", PROCESSOR_PPC630,
717	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
718	 {"power4", PROCESSOR_POWER4,
719	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
720	 {"power5", PROCESSOR_POWER5,
721	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
722	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
723	 {"powerpc64", PROCESSOR_POWERPC64,
724	  POWERPC_BASE_MASK | MASK_POWERPC64},
725	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
726	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
727	 {"rios2", PROCESSOR_RIOS2,
728	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
729	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
730	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
731	 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
732      };
733
734  const size_t ptt_size = ARRAY_SIZE (processor_target_table);
735
736  /* Some OSs don't support saving the high part of 64-bit registers on
737     context switch.  Other OSs don't support saving Altivec registers.
738     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
739     settings; if the user wants either, the user must explicitly specify
740     them and we won't interfere with the user's specification.  */
741
742  enum {
743    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
744    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
745		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
746		     | MASK_MFCRF)
747  };
748 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
749#ifdef OS_MISSING_POWERPC64
750  if (OS_MISSING_POWERPC64)
751    set_masks &= ~MASK_POWERPC64;
752#endif
753#ifdef OS_MISSING_ALTIVEC
754  if (OS_MISSING_ALTIVEC)
755    set_masks &= ~MASK_ALTIVEC;
756#endif
757
758  /* Don't override by the processor default if given explicitly.  */
759  set_masks &= ~target_flags_explicit;
760
761  /* Identify the processor type.  */
762  rs6000_select[0].string = default_cpu;
763  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
764
765  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
766    {
767      ptr = &rs6000_select[i];
768      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
769	{
770	  for (j = 0; j < ptt_size; j++)
771	    if (! strcmp (ptr->string, processor_target_table[j].name))
772	      {
773		if (ptr->set_tune_p)
774		  rs6000_cpu = processor_target_table[j].processor;
775
776		if (ptr->set_arch_p)
777		  {
778		    target_flags &= ~set_masks;
779		    target_flags |= (processor_target_table[j].target_enable
780				     & set_masks);
781		  }
782		break;
783	      }
784
785	  if (j == ptt_size)
786	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
787	}
788    }
789
790  if (TARGET_E500)
791    rs6000_isel = 1;
792
793  /* If we are optimizing big endian systems for space, use the load/store
794     multiple and string instructions.  */
795  if (BYTES_BIG_ENDIAN && optimize_size)
796    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
797
798  /* Don't allow -mmultiple or -mstring on little endian systems
799     unless the cpu is a 750, because the hardware doesn't support the
800     instructions used in little endian mode, and causes an alignment
801     trap.  The 750 does not cause an alignment trap (except when the
802     target is unaligned).  */
803
804  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
805    {
806      if (TARGET_MULTIPLE)
807	{
808	  target_flags &= ~MASK_MULTIPLE;
809	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
810	    warning ("-mmultiple is not supported on little endian systems");
811	}
812
813      if (TARGET_STRING)
814	{
815	  target_flags &= ~MASK_STRING;
816	  if ((target_flags_explicit & MASK_STRING) != 0)
817	    warning ("-mstring is not supported on little endian systems");
818	}
819    }
820
821  /* Set debug flags */
822  if (rs6000_debug_name)
823    {
824      if (! strcmp (rs6000_debug_name, "all"))
825	rs6000_debug_stack = rs6000_debug_arg = 1;
826      else if (! strcmp (rs6000_debug_name, "stack"))
827	rs6000_debug_stack = 1;
828      else if (! strcmp (rs6000_debug_name, "arg"))
829	rs6000_debug_arg = 1;
830      else
831	error ("unknown -mdebug-%s switch", rs6000_debug_name);
832    }
833
834  if (rs6000_traceback_name)
835    {
836      if (! strncmp (rs6000_traceback_name, "full", 4))
837	rs6000_traceback = traceback_full;
838      else if (! strncmp (rs6000_traceback_name, "part", 4))
839	rs6000_traceback = traceback_part;
840      else if (! strncmp (rs6000_traceback_name, "no", 2))
841	rs6000_traceback = traceback_none;
842      else
843	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
844	       rs6000_traceback_name);
845    }
846
847  /* Set size of long double */
848  rs6000_long_double_type_size = 64;
849  if (rs6000_long_double_size_string)
850    {
851      char *tail;
852      int size = strtol (rs6000_long_double_size_string, &tail, 10);
853      if (*tail != '\0' || (size != 64 && size != 128))
854	error ("Unknown switch -mlong-double-%s",
855	       rs6000_long_double_size_string);
856      else
857	rs6000_long_double_type_size = size;
858    }
859
860  /* Set Altivec ABI as default for powerpc64 linux.  */
861  if (TARGET_ELF && TARGET_64BIT)
862    {
863      rs6000_altivec_abi = 1;
864      rs6000_altivec_vrsave = 1;
865    }
866
867  /* Handle -mabi= options.  */
868  rs6000_parse_abi_options ();
869
870  /* Handle -malign-XXXXX option.  */
871  rs6000_parse_alignment_option ();
872
873  /* Handle generic -mFOO=YES/NO options.  */
874  rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
875			      &rs6000_altivec_vrsave);
876  rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
877			      &rs6000_isel);
878  rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
879  rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
880			      &rs6000_float_gprs);
881
882  /* Handle -mtls-size option.  */
883  rs6000_parse_tls_size_option ();
884
885#ifdef SUBTARGET_OVERRIDE_OPTIONS
886  SUBTARGET_OVERRIDE_OPTIONS;
887#endif
888#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
889  SUBSUBTARGET_OVERRIDE_OPTIONS;
890#endif
891
892  if (TARGET_E500)
893    {
894      if (TARGET_ALTIVEC)
895      error ("AltiVec and E500 instructions cannot coexist");
896
897      /* The e500 does not have string instructions, and we set
898	 MASK_STRING above when optimizing for size.  */
899      if ((target_flags & MASK_STRING) != 0)
900	target_flags = target_flags & ~MASK_STRING;
901
902      /* No SPE means 64-bit long doubles, even if an E500.  */
903      if (rs6000_spe_string != 0
904          && !strcmp (rs6000_spe_string, "no"))
905	rs6000_long_double_type_size = 64;
906    }
907  else if (rs6000_select[1].string != NULL)
908    {
909      /* For the powerpc-eabispe configuration, we set all these by
910	 default, so let's unset them if we manually set another
911	 CPU that is not the E500.  */
912      if (rs6000_abi_string == 0)
913	rs6000_spe_abi = 0;
914      if (rs6000_spe_string == 0)
915	rs6000_spe = 0;
916      if (rs6000_float_gprs_string == 0)
917	rs6000_float_gprs = 0;
918      if (rs6000_isel_string == 0)
919	rs6000_isel = 0;
920      if (rs6000_long_double_size_string == 0)
921	rs6000_long_double_type_size = 64;
922    }
923
924  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
925			&& rs6000_cpu != PROCESSOR_POWER5);
926  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
927			 || rs6000_cpu == PROCESSOR_POWER5);
928
929  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
930     using TARGET_OPTIONS to handle a toggle switch, but we're out of
931     bits in target_flags so TARGET_SWITCHES cannot be used.
932     Assumption here is that rs6000_longcall_switch points into the
933     text of the complete option, rather than being a copy, so we can
934     scan back for the presence or absence of the no- modifier.  */
935  if (rs6000_longcall_switch)
936    {
937      const char *base = rs6000_longcall_switch;
938      while (base[-1] != 'm') base--;
939
940      if (*rs6000_longcall_switch != '\0')
941	error ("invalid option `%s'", base);
942      rs6000_default_long_calls = (base[0] != 'n');
943    }
944
945  /* Handle -m(no-)warn-altivec-long similarly.  */
946  if (rs6000_warn_altivec_long_switch)
947    {
948      const char *base = rs6000_warn_altivec_long_switch;
949      while (base[-1] != 'm') base--;
950
951      if (*rs6000_warn_altivec_long_switch != '\0')
952       error ("invalid option `%s'", base);
953      rs6000_warn_altivec_long = (base[0] != 'n');
954    }
955
956  /* Handle -mprioritize-restricted-insns option.  */
957  rs6000_sched_restricted_insns_priority
958    = (rs6000_sched_groups ? 1 : 0);
959  if (rs6000_sched_restricted_insns_priority_str)
960    rs6000_sched_restricted_insns_priority =
961      atoi (rs6000_sched_restricted_insns_priority_str);
962
963  /* Handle -msched-costly-dep option.  */
964  rs6000_sched_costly_dep
965    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
966  if (rs6000_sched_costly_dep_str)
967    {
968      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
969        rs6000_sched_costly_dep = no_dep_costly;
970      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
971        rs6000_sched_costly_dep = all_deps_costly;
972      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
973        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
974      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
975        rs6000_sched_costly_dep = store_to_load_dep_costly;
976      else
977        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
978    }
979
980  /* Handle -minsert-sched-nops option.  */
981  rs6000_sched_insert_nops
982    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
983  if (rs6000_sched_insert_nops_str)
984    {
985      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
986        rs6000_sched_insert_nops = sched_finish_none;
987      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
988        rs6000_sched_insert_nops = sched_finish_pad_groups;
989      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
990        rs6000_sched_insert_nops = sched_finish_regroup_exact;
991      else
992        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
993    }
994
995#ifdef TARGET_REGNAMES
996  /* If the user desires alternate register names, copy in the
997     alternate names now.  */
998  if (TARGET_REGNAMES)
999    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1000#endif
1001
1002  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1003     If -maix-struct-return or -msvr4-struct-return was explicitly
1004     used, don't override with the ABI default.  */
1005  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1006    {
1007      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1008	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1009      else
1010	target_flags |= MASK_AIX_STRUCT_RET;
1011    }
1012
1013  if (TARGET_LONG_DOUBLE_128
1014      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1015    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1016
1017  /* Allocate an alias set for register saves & restores from stack.  */
1018  rs6000_sr_alias_set = new_alias_set ();
1019
1020  if (TARGET_TOC)
1021    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1022
1023  /* We can only guarantee the availability of DI pseudo-ops when
1024     assembling for 64-bit targets.  */
1025  if (!TARGET_64BIT)
1026    {
1027      targetm.asm_out.aligned_op.di = NULL;
1028      targetm.asm_out.unaligned_op.di = NULL;
1029    }
1030
1031  /* Set maximum branch target alignment at two instructions, eight bytes.  */
1032  align_jumps_max_skip = 8;
1033  align_loops_max_skip = 8;
1034
1035  /* Arrange to save and restore machine status around nested functions.  */
1036  init_machine_status = rs6000_init_machine_status;
1037
1038  /* We should always be splitting complex arguments, but we can't break
1039     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
1040  if (DEFAULT_ABI != ABI_AIX)
1041    targetm.calls.split_complex_arg = NULL;
1042}
1043
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name (the part after "-m" and before "=").
   VALUE is the option value, or null if the option was not given.
   FLAG receives 1 for 'yes' and 0 for 'no'; any other value is an
   error and FLAG is left untouched.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* Option not given on the command line: keep the current default.  */
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1061
1062/* Handle -mabi= options.  */
1063static void
1064rs6000_parse_abi_options (void)
1065{
1066  if (rs6000_abi_string == 0)
1067    return;
1068  else if (! strcmp (rs6000_abi_string, "altivec"))
1069    {
1070      rs6000_altivec_abi = 1;
1071      rs6000_spe_abi = 0;
1072    }
1073  else if (! strcmp (rs6000_abi_string, "no-altivec"))
1074    rs6000_altivec_abi = 0;
1075  else if (! strcmp (rs6000_abi_string, "spe"))
1076    {
1077      rs6000_spe_abi = 1;
1078      rs6000_altivec_abi = 0;
1079      if (!TARGET_SPE_ABI)
1080	error ("not configured for ABI: '%s'", rs6000_abi_string);
1081    }
1082
1083  else if (! strcmp (rs6000_abi_string, "no-spe"))
1084    rs6000_spe_abi = 0;
1085  else
1086    error ("unknown ABI specified: '%s'", rs6000_abi_string);
1087}
1088
1089/* Handle -malign-XXXXXX options.  */
1090static void
1091rs6000_parse_alignment_option (void)
1092{
1093  if (rs6000_alignment_string == 0)
1094    return;
1095  else if (! strcmp (rs6000_alignment_string, "power"))
1096    rs6000_alignment_flags = MASK_ALIGN_POWER;
1097  else if (! strcmp (rs6000_alignment_string, "natural"))
1098    rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1099  else
1100    error ("unknown -malign-XXXXX option specified: '%s'",
1101	   rs6000_alignment_string);
1102}
1103
1104/* Validate and record the size specified with the -mtls-size option.  */
1105
1106static void
1107rs6000_parse_tls_size_option (void)
1108{
1109  if (rs6000_tls_size_string == 0)
1110    return;
1111  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1112    rs6000_tls_size = 16;
1113  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1114    rs6000_tls_size = 32;
1115  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1116    rs6000_tls_size = 64;
1117  else
1118    error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1119}
1120
/* Hook for optimization-level-dependent option adjustments (presumably
   the OPTIMIZATION_OPTIONS target macro — confirm against the tm.h for
   this port).  The rs6000 port currently makes no such adjustments,
   so the body is intentionally empty.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
1125
/* Do anything needed at the start of the asm file.  In particular,
   when -fverbose-asm is in effect, emit a single assembler-comment
   line listing the rs6000/powerpc options in effect.  */

static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  /* START points at the banner in BUFFER until the first item has
     been printed; after that it is "" so the banner appears once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On a bi-arch compiler, the configured default CPU name only
     applies when the word size in effect matches the configured
     default; otherwise drop it.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Print every -mcpu/-mtune style selection that was set.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* START was cleared iff something was printed; terminate the
	 comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1181
1182/* Return nonzero if this function is known to have a null epilogue.  */
1183
1184int
1185direct_return (void)
1186{
1187  if (reload_completed)
1188    {
1189      rs6000_stack_t *info = rs6000_stack_info ();
1190
1191      if (info->first_gp_reg_save == 32
1192	  && info->first_fp_reg_save == 64
1193	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1194	  && ! info->lr_save_p
1195	  && ! info->cr_save_p
1196	  && info->vrsave_mask == 0
1197	  && ! info->push_p)
1198	return 1;
1199    }
1200
1201  return 0;
1202}
1203
/* Predicate that accepts any operand in any mode: returns 1 always.
   Both parameters are intentionally unused.  */

int
any_operand (rtx op ATTRIBUTE_UNUSED,
	     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return 1;
}
1212
1213/* Returns 1 if op is the count register.  */
1214int
1215count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1216{
1217  if (GET_CODE (op) != REG)
1218    return 0;
1219
1220  if (REGNO (op) == COUNT_REGISTER_REGNUM)
1221    return 1;
1222
1223  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1224    return 1;
1225
1226  return 0;
1227}
1228
1229/* Returns 1 if op is an altivec register.  */
1230int
1231altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1232{
1233
1234  return (register_operand (op, mode)
1235	  && (GET_CODE (op) != REG
1236	      || REGNO (op) > FIRST_PSEUDO_REGISTER
1237	      || ALTIVEC_REGNO_P (REGNO (op))));
1238}
1239
1240int
1241xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1242{
1243  if (GET_CODE (op) != REG)
1244    return 0;
1245
1246  if (XER_REGNO_P (REGNO (op)))
1247    return 1;
1248
1249  return 0;
1250}
1251
1252/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
1253   by such constants completes more quickly.  */
1254
1255int
1256s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1257{
1258  return ( GET_CODE (op) == CONST_INT
1259	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1260}
1261
1262/* Return 1 if OP is a constant that can fit in a D field.  */
1263
1264int
1265short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1266{
1267  return (GET_CODE (op) == CONST_INT
1268	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1269}
1270
1271/* Similar for an unsigned D field.  */
1272
1273int
1274u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1275{
1276  return (GET_CODE (op) == CONST_INT
1277	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1278}
1279
1280/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
1281
1282int
1283non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1284{
1285  return (GET_CODE (op) == CONST_INT
1286	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1287}
1288
1289/* Returns 1 if OP is a CONST_INT that is a positive value
1290   and an exact power of 2.  */
1291
1292int
1293exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1294{
1295  return (GET_CODE (op) == CONST_INT
1296	  && INTVAL (op) > 0
1297	  && exact_log2 (INTVAL (op)) >= 0);
1298}
1299
1300/* Returns 1 if OP is a register that is not special (i.e., not MQ,
1301   ctr, or lr).  */
1302
1303int
1304gpc_reg_operand (rtx op, enum machine_mode mode)
1305{
1306  return (register_operand (op, mode)
1307	  && (GET_CODE (op) != REG
1308	      || (REGNO (op) >= ARG_POINTER_REGNUM
1309		  && !XER_REGNO_P (REGNO (op)))
1310	      || REGNO (op) < MQ_REGNO));
1311}
1312
1313/* Returns 1 if OP is either a pseudo-register or a register denoting a
1314   CR field.  */
1315
1316int
1317cc_reg_operand (rtx op, enum machine_mode mode)
1318{
1319  return (register_operand (op, mode)
1320	  && (GET_CODE (op) != REG
1321	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1322	      || CR_REGNO_P (REGNO (op))));
1323}
1324
1325/* Returns 1 if OP is either a pseudo-register or a register denoting a
1326   CR field that isn't CR0.  */
1327
1328int
1329cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1330{
1331  return (register_operand (op, mode)
1332	  && (GET_CODE (op) != REG
1333	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1334	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
1335}
1336
1337/* Returns 1 if OP is either a constant integer valid for a D-field or
1338   a non-special register.  If a register, it must be in the proper
1339   mode unless MODE is VOIDmode.  */
1340
1341int
1342reg_or_short_operand (rtx op, enum machine_mode mode)
1343{
1344  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1345}
1346
1347/* Similar, except check if the negation of the constant would be
1348   valid for a D-field.  Don't allow a constant zero, since all the
1349   patterns that call this predicate use "addic r1,r2,-constant" on
1350   a constant value to set a carry when r2 is greater or equal to
1351   "constant".  That doesn't work for zero.  */
1352
1353int
1354reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1355{
1356  if (GET_CODE (op) == CONST_INT)
1357    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1358
1359  return gpc_reg_operand (op, mode);
1360}
1361
1362/* Returns 1 if OP is either a constant integer valid for a DS-field or
1363   a non-special register.  If a register, it must be in the proper
1364   mode unless MODE is VOIDmode.  */
1365
1366int
1367reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1368{
1369  if (gpc_reg_operand (op, mode))
1370    return 1;
1371  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1372    return 1;
1373
1374  return 0;
1375}
1376
1377
1378/* Return 1 if the operand is either a register or an integer whose
1379   high-order 16 bits are zero.  */
1380
1381int
1382reg_or_u_short_operand (rtx op, enum machine_mode mode)
1383{
1384  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1385}
1386
1387/* Return 1 is the operand is either a non-special register or ANY
1388   constant integer.  */
1389
1390int
1391reg_or_cint_operand (rtx op, enum machine_mode mode)
1392{
1393  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1394}
1395
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a 64-bit host, bias by 2^31 so that exactly the
		 values in [-2^31, 2^31) land in [0, 2^32).  On a
		 32-bit host every CONST_INT already fits.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1410
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* 32-bit host: CONST_INTs are at most 32 bits, so only
		 the upper bound needs checking.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* 64-bit host: accept [-0x80008000, 0x7fff7fff].  The
		 0x8000 part of the bias accounts for the sign
		 extension of the low 16-bit half.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1427
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
{
  /* Same range test as reg_or_add_cint64_operand, but applied to the
     negated value, since subtraction is implemented as addition of
     the negation.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1444
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* Only modes wider than 32 bits should ever be wider than
	     the host wide int here.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends above bit 31, so it
	     cannot represent a 32-bit unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* All mode bits above the low 32 must be zero.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs are only expected for DImode values
	 wider than a host wide int.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Unsigned 32-bit iff the high word is zero.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1476
1477/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1478
1479int
1480got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1481{
1482  return (GET_CODE (op) == SYMBOL_REF
1483	  || GET_CODE (op) == CONST
1484	  || GET_CODE (op) == LABEL_REF);
1485}
1486
1487/* Return 1 if the operand is a simple references that can be loaded via
1488   the GOT (labels involving addition aren't allowed).  */
1489
1490int
1491got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1492{
1493  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1494}
1495
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

static int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* LOW is the low 32 bits, sign-extended.  HIGH is shifted by 31
	 (not 32) so that the test below also catches values that are
	 just the sign extension of LOW.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Value fits in 32 bits after sign extension: two insns.  */
      if (high == 0 || high == -1)
	return 2;

      /* Complete the shift so HIGH is the true upper 32 bits.  */
      high >>= 1;

      if (low == 0)
	/* Cost of HIGH plus one insn to position it.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Cost of each half plus one insn to combine them.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1532
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register (or register pair when the
   target is 32-bit).  OP must be a CONST_INT or CONST_DOUBLE.  */
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value wider than 32 bits that matches mask64_operand can be
	 formed in two instructions.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* Cost the 32-bit target image of the single-precision value.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Obtain the two 32-bit halves: directly for an integral
	 CONST_DOUBLE, otherwise from the target image of the double,
	 taking word order into account.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two separate 32-bit registers: each half is independent.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    /* Both halves plus one insn to combine them.  */
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1605
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      /* Easy iff each of the four 32-bit words of the long double
	 image is loadable in a single instruction.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      /* Likewise for the two words of a double.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* And for the single word of a float.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* Easy when the low word is zero on 64-bit targets, or when the
       whole value can be formed in at most two instructions.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1681
/* Returns the constant for the splat instruction, if it exists:
   the (possibly narrowed) element value usable as a vspltis*
   immediate, or 0 if CST cannot be splatted in MODE.  Each wider
   case deliberately falls through to the next narrower one when
   the value is a repetition of its lower half.  */

static int
easy_vector_splat_const (int cst, enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      /* Not directly splattable as words; if the two halfwords are
	 identical, retry the value as a halfword splat.  */
      if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
	break;
      cst = cst >> 16;
      /* Fall through.  */
    case V8HImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      /* Likewise, retry as a byte splat if both bytes are equal.  */
      if ((cst & 0xff) != ((cst >> 8) & 0xff))
	break;
      cst = cst >> 8;
      /* Fall through.  */
    case V16QImode:
	  if (EASY_VECTOR_15 (cst)
	      || EASY_VECTOR_15_ADD_SELF (cst))
	    return cst;
    default:
      break;
    }
  /* No usable splat constant found.  */
  return 0;
}
1712
1713
1714/* Return nonzero if all elements of a vector have the same value.  */
1715
1716static int
1717easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1718{
1719  int units, i, cst;
1720
1721  units = CONST_VECTOR_NUNITS (op);
1722
1723  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1724  for (i = 1; i < units; ++i)
1725    if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1726      break;
1727  if (i == units && easy_vector_splat_const (cst, mode))
1728    return 1;
1729  return 0;
1730}
1731
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register without using memory (original comment said CONST_INT;
   the code clearly tests CONST_VECTOR).  */

int
easy_vector_constant (rtx op, enum machine_mode mode)
{
  int cst, cst2;

  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zero vector is easy on both AltiVec and SPE.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  /* First two element values; the SPE case below inspects both, the
     AltiVec case relies on easy_vector_same for full uniformity.  */
  cst  = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
       li r0, CONSTANT1
       evmergelo r0, r0, r0
       li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst  >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  if (TARGET_ALTIVEC
      && easy_vector_same (op, mode))
    {
      /* Easy when the splat value fits the 5-bit signed immediate
	 range, directly or via the add-to-self trick.  */
      cst = easy_vector_splat_const (cst, mode);
      if (EASY_VECTOR_15_ADD_SELF (cst)
	  || EASY_VECTOR_15 (cst))
	return 1;
    }
  return 0;
}
1783
1784/* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF.  */
1785
1786int
1787easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1788{
1789  int cst;
1790  if (TARGET_ALTIVEC
1791      && GET_CODE (op) == CONST_VECTOR
1792      && easy_vector_same (op, mode))
1793    {
1794      cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1795      if (EASY_VECTOR_15_ADD_SELF (cst))
1796	return 1;
1797    }
1798  return 0;
1799}
1800
1801/* Generate easy_vector_constant out of a easy_vector_constant_add_self.  */
1802
1803rtx
1804gen_easy_vector_constant_add_self (rtx op)
1805{
1806  int i, units;
1807  rtvec v;
1808  units = GET_MODE_NUNITS (GET_MODE (op));
1809  v = rtvec_alloc (units);
1810
1811  for (i = 0; i < units; i++)
1812    RTVEC_ELT (v, i) =
1813      GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1814  return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1815}
1816
/* Emit the assembler template for moving the easy vector constant
   OPERANDS[1] into vector register OPERANDS[0].  Returns the template
   string; may rewrite OPERANDS[1] (and, for SPE, OPERANDS[2]) with the
   scalar constants the template references.  "#" means the insn must be
   split.  */

const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  /* First two elements of the constant; for splats they are all equal.  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (easy_vector_constant (vec, mode))
	{
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisw %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      cst = cst >> 16;
	      /* Fall through: retry with the halfword splat value.  */
	    case V8HImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltish %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      cst = cst >> 8;
	      /* Fall through: retry with the byte splat value.  */
	    case V16QImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisb %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	    default:
	      abort ();
	    }
	}
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
      */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  /* Neither vector unit enabled: the insn should never have matched.  */
  abort ();
}
1892
1893/* Return 1 if the operand is the constant 0.  This works for scalars
1894   as well as vectors.  */
1895int
1896zero_constant (rtx op, enum machine_mode mode)
1897{
1898  return op == CONST0_RTX (mode);
1899}
1900
1901/* Return 1 if the operand is 0.0.  */
1902int
1903zero_fp_constant (rtx op, enum machine_mode mode)
1904{
1905  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1906}
1907
1908/* Return 1 if the operand is in volatile memory.  Note that during
1909   the RTL generation phase, memory_operand does not return TRUE for
1910   volatile memory references.  So this function allows us to
1911   recognize volatile references where its safe.  */
1912
1913int
1914volatile_mem_operand (rtx op, enum machine_mode mode)
1915{
1916  if (GET_CODE (op) != MEM)
1917    return 0;
1918
1919  if (!MEM_VOLATILE_P (op))
1920    return 0;
1921
1922  if (mode != GET_MODE (op))
1923    return 0;
1924
1925  if (reload_completed)
1926    return memory_operand (op, mode);
1927
1928  if (reload_in_progress)
1929    return strict_memory_address_p (mode, XEXP (op, 0));
1930
1931  return memory_address_p (mode, XEXP (op, 0));
1932}
1933
1934/* Return 1 if the operand is an offsettable memory operand.  */
1935
1936int
1937offsettable_mem_operand (rtx op, enum machine_mode mode)
1938{
1939  return ((GET_CODE (op) == MEM)
1940	  && offsettable_address_p (reload_completed || reload_in_progress,
1941				    mode, XEXP (op, 0)));
1942}
1943
1944/* Return 1 if the operand is either an easy FP constant (see above) or
1945   memory.  */
1946
1947int
1948mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1949{
1950  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1951}
1952
1953/* Return 1 if the operand is either a non-special register or an item
1954   that can be used as the operand of a `mode' add insn.  */
1955
1956int
1957add_operand (rtx op, enum machine_mode mode)
1958{
1959  if (GET_CODE (op) == CONST_INT)
1960    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1961	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1962
1963  return gpc_reg_operand (op, mode);
1964}
1965
1966/* Return 1 if OP is a constant but not a valid add_operand.  */
1967
1968int
1969non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1970{
1971  return (GET_CODE (op) == CONST_INT
1972	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1973	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1974}
1975
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      /* Restrict the test to bits that are significant in MODE.  */
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host, a sign-extended value in a mode wider than
	 HOST_WIDE_INT implies high bits we cannot inspect; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* CONST_DOUBLE integers only arise for modes wider than a
	 HOST_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* Any set bit in the high word rules the constant out.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept values that fit entirely in either the low or the high 16
     bits (the unshifted and shifted immediate forms).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
2012
2013/* Return 1 if C is a constant that is not a logical operand (as
2014   above), but could be split into one.  */
2015
2016int
2017non_logical_cint_operand (rtx op, enum machine_mode mode)
2018{
2019  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2020	  && ! logical_operand (op, mode)
2021	  && reg_or_logical_cint_operand (op, mode));
2022}
2023
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;			/* c & -c isolates the lowest set bit.  */

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
2068
/* Return 1 for the PowerPC64 rlwinm corner case.  */

int
mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrap-around masks (both bit 0 and bit 31 of the low word set)
     belong here; mask_operand rejects exactly these on PowerPC64.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* The LS bit is known set, so invert to start with it clear; then
     apply the same two-transition test as mask_operand.  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;
  /* Invert to look for a second transition.  */
  c = ~c;
  /* Erase the first transition.  */
  c &= -lsb;
  /* Find the second transition (if any).  */
  lsb = c & -c;
  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
2094
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;		/* Isolate the lowest set bit.  */

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
2126
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
2175
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  On return, OUT[0] is the first
   rotate count (64 - shift), OUT[1] the first mask M1, OUT[2] the
   second rotate count (shift), and OUT[3] the second mask M2.  IN must
   be a CONST_INT accepted by mask64_2_operand.  Requires a 64-bit-wide
   host; aborts otherwise.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
2245
2246/* Return 1 if the operand is either a non-special register or a constant
2247   that can be used as the operand of a PowerPC64 logical AND insn.  */
2248
2249int
2250and64_operand (rtx op, enum machine_mode mode)
2251{
2252  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2253    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2254
2255  return (logical_operand (op, mode) || mask64_operand (op, mode));
2256}
2257
2258/* Like the above, but also match constants that can be implemented
2259   with two rldicl or rldicr insns.  */
2260
2261int
2262and64_2_operand (rtx op, enum machine_mode mode)
2263{
2264  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2265    return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2266
2267  return logical_operand (op, mode) || mask64_2_operand (op, mode);
2268}
2269
2270/* Return 1 if the operand is either a non-special register or a
2271   constant that can be used as the operand of an RS/6000 logical AND insn.  */
2272
2273int
2274and_operand (rtx op, enum machine_mode mode)
2275{
2276  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2277    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2278
2279  return (logical_operand (op, mode) || mask_operand (op, mode));
2280}
2281
2282/* Return 1 if the operand is a general register or memory operand.  */
2283
2284int
2285reg_or_mem_operand (rtx op, enum machine_mode mode)
2286{
2287  return (gpc_reg_operand (op, mode)
2288	  || memory_operand (op, mode)
2289	  || macho_lo_sum_memory_operand (op, mode)
2290	  || volatile_mem_operand (op, mode));
2291}
2292
2293/* Return 1 if the operand is a general register or memory operand without
2294   pre_inc or pre_dec which produces invalid form of PowerPC lwa
2295   instruction.  */
2296
2297int
2298lwa_operand (rtx op, enum machine_mode mode)
2299{
2300  rtx inner = op;
2301
2302  if (reload_completed && GET_CODE (inner) == SUBREG)
2303    inner = SUBREG_REG (inner);
2304
2305  return gpc_reg_operand (inner, mode)
2306    || (memory_operand (inner, mode)
2307	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
2308	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
2309	&& (GET_CODE (XEXP (inner, 0)) != PLUS
2310	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2311	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2312}
2313
2314/* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.  */
2315
2316int
2317symbol_ref_operand (rtx op, enum machine_mode mode)
2318{
2319  if (mode != VOIDmode && GET_MODE (op) != mode)
2320    return 0;
2321
2322  return (GET_CODE (op) == SYMBOL_REF
2323	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2324}
2325
2326/* Return 1 if the operand, used inside a MEM, is a valid first argument
2327   to CALL.  This is a SYMBOL_REF, a pseudo-register, LR or CTR.  */
2328
2329int
2330call_operand (rtx op, enum machine_mode mode)
2331{
2332  if (mode != VOIDmode && GET_MODE (op) != mode)
2333    return 0;
2334
2335  return (GET_CODE (op) == SYMBOL_REF
2336	  || (GET_CODE (op) == REG
2337	      && (REGNO (op) == LINK_REGISTER_REGNUM
2338		  || REGNO (op) == COUNT_REGISTER_REGNUM
2339		  || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2340}
2341
2342/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2343   this file.  */
2344
2345int
2346current_file_function_operand (rtx op,
2347                              enum machine_mode mode ATTRIBUTE_UNUSED)
2348{
2349  return (GET_CODE (op) == SYMBOL_REF
2350	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2351	  && (SYMBOL_REF_LOCAL_P (op)
2352	      || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2353}
2354
/* Return 1 if this operand is a valid input for a move insn.  The
   tests are ordered; each early return short-circuits the rest.  */

int
input_operand (rtx op, enum machine_mode mode)
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2414
2415
/* Darwin, AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.
   COMPUTED and SPECIFIED are alignments in bits; returns the alignment
   to use for TYPE.  */

unsigned int
rs6000_special_round_type_align (tree type, int computed, int specified)
{
  tree field = TYPE_FIELDS (type);

  /* Skip any leading VAR_DECLs (e.g. static members) so we look at the
     first real field of the record.  */
  while (field != NULL && TREE_CODE (field) == VAR_DECL)
    field = TREE_CHAIN (field);

  /* Without a leading DFmode field, the normal alignment stands.  */
  if (field == NULL || field == type || DECL_MODE (field) != DFmode)
    return MAX (computed, specified);

  /* First field is an FP double: bump the record to 64-bit alignment.  */
  return MAX (MAX (computed, specified), 64);
}
2434
/* Return 1 for an operand in small memory on V.4/eabi.  Accepts a bare
   SYMBOL_REF or a (const (plus SYMBOL_REF const_int)) whose referenced
   address stays within the small-data range.  */

int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data only exists for the sdata/use modes on V.4.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  /* Non-ELF targets have no small data region.  */
  return 0;
#endif
}
2478
2479/* Return true, if operand is a memory operand and has a
2480   displacement divisible by 4.  */
2481
2482int
2483word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2484{
2485  rtx addr;
2486  int off = 0;
2487
2488  if (!memory_operand (op, mode))
2489    return 0;
2490
2491  addr = XEXP (op, 0);
2492  if (GET_CODE (addr) == PLUS
2493      && GET_CODE (XEXP (addr, 0)) == REG
2494      && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2495    off = INTVAL (XEXP (addr, 1));
2496
2497  return (off % 4) == 0;
2498}
2499
2500/* Return true if operand is a (MEM (PLUS (REG) (offset))) where offset
2501   is not divisible by four.  */
2502
2503int
2504invalid_gpr_mem (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2505{
2506  rtx addr;
2507  long off;
2508
2509  if (GET_CODE (op) != MEM)
2510    return 0;
2511
2512  addr = XEXP (op, 0);
2513  if (GET_CODE (addr) != PLUS
2514      || GET_CODE (XEXP (addr, 0)) != REG
2515      || GET_CODE (XEXP (addr, 1)) != CONST_INT)
2516    return 0;
2517
2518  off = INTVAL (XEXP (addr, 1));
2519  return (off & 3) != 0;
2520}
2521
2522/* Return true if operand is a hard register that can be used as a base
2523   register.  */
2524
2525int
2526base_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2527{
2528  unsigned int regno;
2529
2530  if (!REG_P (op))
2531    return 0;
2532
2533  regno = REGNO (op);
2534  return regno != 0 && regno <= 31;
2535}
2536
2537/* Return true if either operand is a general purpose register.  */
2538
2539bool
2540gpr_or_gpr_p (rtx op0, rtx op1)
2541{
2542  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2543	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2544}
2545
2546
2547/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
2548
/* Recursive worker: scan OP for constant-pool symbols and TOC-label
   references.  Sets *HAVE_SYM when a pool constant acceptable to
   ASM_OUTPUT_SPECIAL_POOL_ENTRY_P is seen, *HAVE_TOC when the TOC label
   itself is seen.  Returns 1 iff every leaf of OP is acceptable.  */

static int
constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      /* TLS symbols are never constant-pool expressions.  */
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      /* The TOC base label itself also qualifies.  */
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves must be acceptable.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2586
2587static bool
2588constant_pool_expr_p (rtx op)
2589{
2590  int have_sym = 0;
2591  int have_toc = 0;
2592  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2593}
2594
2595static bool
2596toc_relative_expr_p (rtx op)
2597{
2598  int have_sym = 0;
2599  int have_toc = 0;
2600  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2601}
2602
2603/* SPE offset addressing is limited to 5-bits worth of double words.  */
2604#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2605
2606bool
2607legitimate_constant_pool_address_p (rtx x)
2608{
2609  return (TARGET_TOC
2610	  && GET_CODE (x) == PLUS
2611	  && GET_CODE (XEXP (x, 0)) == REG
2612	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2613	  && constant_pool_expr_p (XEXP (x, 1)));
2614}
2615
2616static bool
2617legitimate_small_data_p (enum machine_mode mode, rtx x)
2618{
2619  return (DEFAULT_ABI == ABI_V4
2620	  && !flag_pic && !TARGET_TOC
2621	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2622	  && small_data_operand (x, mode));
2623}
2624
/* Return true if X is a legitimate (reg + const_int) address for MODE.
   STRICT selects strict register checking.  EXTRA accounts for modes
   accessed as multiple words, so that the final word's offset also
   stays in range.  */
static bool
legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* Both DFmode and DImode may end up in gprs.  If gprs are 32-bit,
	 then we need to load/store at both offset and offset+4.  */
      if (!TARGET_POWERPC64)
	extra = 4;
      break;

    case TFmode:
    case TImode:
      /* 16-byte modes: last word is at offset+12 (32-bit) or +8 (64-bit).  */
      if (!TARGET_POWERPC64)
	extra = 12;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so the signed 16-bit range becomes [0, 0x10000).  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2682
2683static bool
2684legitimate_indexed_address_p (rtx x, int strict)
2685{
2686  rtx op0, op1;
2687
2688  if (GET_CODE (x) != PLUS)
2689    return false;
2690  op0 = XEXP (x, 0);
2691  op1 = XEXP (x, 1);
2692
2693  if (!REG_P (op0) || !REG_P (op1))
2694    return false;
2695
2696  return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2697	   && INT_REG_OK_FOR_INDEX_P (op1, strict))
2698	  || (INT_REG_OK_FOR_BASE_P (op1, strict)
2699	      && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2700}
2701
2702static inline bool
2703legitimate_indirect_address_p (rtx x, int strict)
2704{
2705  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2706}
2707
2708static bool
2709macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2710{
2711    if (!TARGET_MACHO || !flag_pic
2712        || mode != SImode || GET_CODE(x) != MEM)
2713      return false;
2714    x = XEXP (x, 0);
2715
2716  if (GET_CODE (x) != LO_SUM)
2717    return false;
2718  if (GET_CODE (XEXP (x, 0)) != REG)
2719    return false;
2720  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2721    return false;
2722  x = XEXP (x, 1);
2723
2724  return CONSTANT_P (x);
2725}
2726
2727static bool
2728legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2729{
2730  if (GET_CODE (x) != LO_SUM)
2731    return false;
2732  if (GET_CODE (XEXP (x, 0)) != REG)
2733    return false;
2734  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2735    return false;
2736  x = XEXP (x, 1);
2737
2738  if (TARGET_ELF || TARGET_MACHO)
2739    {
2740      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2741	return false;
2742      if (TARGET_TOC)
2743	return false;
2744      if (GET_MODE_NUNITS (mode) != 1)
2745	return false;
2746      if (GET_MODE_BITSIZE (mode) > 32
2747	  && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2748	return false;
2749
2750      return CONSTANT_P (x);
2751    }
2752
2753  return false;
2754}
2755
2756
2757/* Try machine-dependent ways of modifying an illegitimate address
2758   to be legitimate.  If we find one, return the new, valid address.
2759   This is used from only one place: `memory_address' in explow.c.
2760
2761   OLDX is the address as it was before break_out_memory_refs was
2762   called.  In some cases it is useful to look at this to decide what
2763   needs to be done.
2764
2765   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2766
2767   It is always safe for this function to do nothing.  It exists to
2768   recognize opportunities to optimize the output.
2769
2770   On RS/6000, first check for the sum of a register with a constant
2771   integer that is out of range.  If so, generate code to add the
2772   constant with the low-order 16 bits masked to the register and force
2773   this result into another register (this can be done with `cau').
2774   Then generate an address of REG+(CONST&0xffff), allowing for the
2775   possibility of bit 16 being a one.
2776
2777   Then check for the sum of a register and something not constant, try to
2778   load the other things into a register and return the sum.  */
2779
rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  /* TLS symbols get their own, model-specific sequences.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: add the high part into a register and
     keep only a signed 16-bit low part in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; the high part absorbs the rest.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so the
     result is an indexed (reg+reg) address, for modes that allow it.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes: only reg and reg+reg addressing.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
      {
        rtx op1 = XEXP (x, 0);
        rtx op2 = XEXP (x, 1);

        op1 = force_reg (Pmode, op1);

        /* Constants outside the SPE offset range go into a register.  */
        if (GET_CODE (op2) != REG
            && (GET_CODE (op2) != CONST_INT
                || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
          op2 = force_reg (Pmode, op2);

        return gen_rtx_PLUS (Pmode, op1, op2);
      }

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without TOC/PIC: split a symbolic constant into a
     high-part load plus a lo_sum.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Same high/lo_sum strategy for 32-bit Mach-O without TOC/PIC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants eligible for the TOC become TOC references.  */
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing to improve; caller keeps the original address.  */
    return NULL_RTX;
}
2890
2891/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2892   We need to emit DTP-relative relocations.  */
2893
2894void
2895rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2896{
2897  switch (size)
2898    {
2899    case 4:
2900      fputs ("\t.long\t", file);
2901      break;
2902    case 8:
2903      fputs (DOUBLE_INT_ASM_OP, file);
2904      break;
2905    default:
2906      abort ();
2907    }
2908  output_addr_const (file, x);
2909  fputs ("@dtprel+0x8000", file);
2910}
2911
2912/* Construct the SYMBOL_REF for the tls_get_addr function.  */
2913
2914static GTY(()) rtx rs6000_tls_symbol;
2915static rtx
2916rs6000_tls_get_addr (void)
2917{
2918  if (!rs6000_tls_symbol)
2919    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2920
2921  return rs6000_tls_symbol;
2922}
2923
2924/* Construct the SYMBOL_REF for TLS GOT references.  */
2925
2926static GTY(()) rtx rs6000_got_symbol;
2927static rtx
2928rs6000_got_sym (void)
2929{
2930  if (!rs6000_got_symbol)
2931    {
2932      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2933      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2934      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2935    }
2936
2937  return rs6000_got_symbol;
2938}
2939
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address under access MODEL (local-exec,
   initial-exec, local-dynamic or global-dynamic) and return a pseudo
   holding the final address.  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* Local-exec, 16-bit offset: a single add off the thread
	 pointer (r13 on 64-bit, r2 on 32-bit).  */
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* Local-exec, 32-bit offset: high-adjusted part into TMP first,
	 then combine with the low 16 bits.  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      /* Remaining models (GD, LD, IE, 64-bit-offset LE) all need a
	 GOT/TOC pointer first.  */
      rtx r3, got, tga, tmp1, tmp2, eqv;

      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC (flag_pic == 2): materialize the GOT address
		     as an LR-relative base plus a constant loaded from
		     memory.  The REG_LIBCALL/REG_RETVAL notes bracket
		     the sequence so later passes treat it as a unit;
		     REG_EQUAL records the value for CSE.  */
		  rtx tempLR, tmp3, mem;
		  rtx first, last;

		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* Global-dynamic: set up r3 from the GOT entry for ADDR,
	     call __tls_get_addr, result returned in r3.  The call is
	     wrapped in a libcall block equivalent to ADDR.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* Local-dynamic: one __tls_get_addr call fetches the module
	     base; each symbol then adds its own DTP-relative offset.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  /* The UNSPEC_TLSLD equivalence lets CSE share one module-base
	     call among all local-dynamic symbols in the function.  */
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      /* 32-bit DTP offset: high-adjusted part then low part.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* rs6000_tls_size == 64: load the DTP offset from the GOT
		 and add it to the module base.
		 NOTE(review): gen_rtx_SET is conventionally given
		 VOIDmode; Pmode here looks anomalous — confirm against
		 later revisions before relying on it.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  Load the TP offset from the GOT,
	     then add the thread pointer.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
3120
/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
   instruction definitions (as a machine-description predicate).  */

int
rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* MODE is required by the predicate calling convention but plays no
     part in the test itself.  */
  return RS6000_SYMBOL_REF_TLS_P (x);
}
3129
3130/* Return 1 if X contains a thread-local symbol.  */
3131
3132bool
3133rs6000_tls_referenced_p (rtx x)
3134{
3135  if (! TARGET_HAVE_TLS)
3136    return false;
3137
3138  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3139}
3140
/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument,
   which makes it usable directly as a for_each_rtx callback.  */

static inline int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
3149
3150/* The convention appears to be to define this wherever it is used.
3151   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3152   is now used here.  */
3153#ifndef REG_MODE_OK_FOR_BASE_P
3154#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3155#endif
3156
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
	int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (plus (plus reg high) low): reload only the inner sum into a
	 base register; the low part stays in the mem.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* (reg + large constant): split the constant so the low part fits a
     signed 16-bit displacement.  Not for vector modes, which have
     their own stricter offset rules.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Sign-extend the low 16 bits; HIGH is the 32-bit remainder,
	 also sign-extended.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
      /* Don't do this for TFmode, since the result isn't offsettable.  */
      && mode != TFmode)
    {
      if (flag_pic)
	{
	  /* PIC: rewrite as pic_base + HIGH/LO_SUM of
	     (sym - picbase), the Darwin indirect-addressing form.  */
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
			   gen_rtx_SYMBOL_REF (Pmode,
			     machopic_function_base_name ())));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
	/* Non-PIC (dynamic-no-pic): plain HIGH/LO_SUM of the symbol.  */
	x = gen_rtx_LO_SUM (GET_MODE (x),
              gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool references with special TOC entries become direct
     TOC-relative addresses; no reload needed.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
3283
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* TLS symbols need a multi-insn materialization sequence and are
     never valid as plain addresses.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  /* (reg) -- register indirect.  */
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement forms, excluded for vector modes.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data-section references (V.4 sdata).  */
  if (legitimate_small_data_p (mode, x))
    return 1;
  /* TOC/constant-pool references.  */
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
	  || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* (reg + signed 16-bit offset).  */
  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* (reg + reg) -- indexed; disallowed for modes that span multiple
     GPRs (see the head comment), since subsequent words are addressed
     with constant offsets at assembly-output time.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  /* (lo_sum reg sym) forms.  */
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3339
3340/* Go to LABEL if ADDR (a legitimate address expression)
3341   has an effect that depends on the machine mode it is used for.
3342
3343   On the RS/6000 this is true of all integral offsets (since AltiVec
3344   modes don't allow them) or is a pre-increment or decrement.
3345
3346   ??? Except that due to conceptual problems in offsettable_address_p
3347   we can't really report the problems of integral offsets.  So leave
3348   this assuming that the adjustable offset must be valid for the
3349   sub-words of a TFmode operand, which is what we had before.  */
3350
3351bool
3352rs6000_mode_dependent_address (rtx addr)
3353{
3354  switch (GET_CODE (addr))
3355    {
3356    case PLUS:
3357      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3358	{
3359	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3360	  return val + 12 + 0x8000 >= 0x10000;
3361	}
3362      break;
3363
3364    case LO_SUM:
3365      return true;
3366
3367    case PRE_INC:
3368    case PRE_DEC:
3369      return TARGET_UPDATE;
3370
3371    default:
3372      break;
3373    }
3374
3375  return false;
3376}
3377
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.  */

rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
		       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Narrow modes: a single move always suffices.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* SImode: load the high 16 bits, then OR in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* DImode: split SOURCE into low (c0) and high (c1) host words;
	 when one host word holds the whole value, c1 is just the
	 sign extension of c0.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the constant on the last insn with a REG_EQUAL note so
     later passes know the exact value produced.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3442
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low host word of the constant, C2 the high word (ignored
   when the host word is 64 bits wide).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: DEST occupies two GPRs; just move each
	 32-bit word separately, honoring endianness.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: synthesize the value from its four 16-bit
	 chunks, ud1 (lowest) through ud4 (highest).  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  /* Fits in a sign-extended 16-bit immediate: one li.  */
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  /* Fits in sign-extended 32 bits: lis, plus ori if the low
	     chunk is nonzero.  */
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  /* 48 significant bits: build ud3:ud2 as a 32-bit value,
	     shift left 16, then OR in ud1.  */
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* Full 64 bits: build the high 32 bits (ud4:ud3), shift
	     left 32, then OR in the low chunks.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
3529
3530/* Emit a move from SOURCE to DEST in mode MODE.  */
3531void
3532rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3533{
3534  rtx operands[2];
3535  operands[0] = dest;
3536  operands[1] = source;
3537
3538  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
3539  if (GET_CODE (operands[1]) == CONST_DOUBLE
3540      && ! FLOAT_MODE_P (mode)
3541      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3542    {
3543      /* FIXME.  This should never happen.  */
3544      /* Since it seems that it does, do the safe thing and convert
3545	 to a CONST_INT.  */
3546      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3547    }
3548  if (GET_CODE (operands[1]) == CONST_DOUBLE
3549      && ! FLOAT_MODE_P (mode)
3550      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3551	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
3552	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
3553	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
3554    abort ();
3555
3556  /* Check if GCC is setting up a block move that will end up using FP
3557     registers as temporaries.  We must make sure this is acceptable.  */
3558  if (GET_CODE (operands[0]) == MEM
3559      && GET_CODE (operands[1]) == MEM
3560      && mode == DImode
3561      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3562	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3563      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3564					    ? 32 : MEM_ALIGN (operands[0])))
3565	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3566					       ? 32
3567					       : MEM_ALIGN (operands[1]))))
3568      && ! MEM_VOLATILE_P (operands [0])
3569      && ! MEM_VOLATILE_P (operands [1]))
3570    {
3571      emit_move_insn (adjust_address (operands[0], SImode, 0),
3572		      adjust_address (operands[1], SImode, 0));
3573      emit_move_insn (adjust_address (operands[0], SImode, 4),
3574		      adjust_address (operands[1], SImode, 4));
3575      return;
3576    }
3577
3578  if (!no_new_pseudos)
3579    {
3580      if (GET_CODE (operands[1]) == MEM && optimize > 0
3581	  && (mode == QImode || mode == HImode || mode == SImode)
3582	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3583	{
3584	  rtx reg = gen_reg_rtx (word_mode);
3585
3586	  emit_insn (gen_rtx_SET (word_mode, reg,
3587				  gen_rtx_ZERO_EXTEND (word_mode,
3588						       operands[1])));
3589	  operands[1] = gen_lowpart (mode, reg);
3590	}
3591      if (GET_CODE (operands[0]) != REG)
3592	operands[1] = force_reg (mode, operands[1]);
3593    }
3594
3595  if (mode == SFmode && ! TARGET_POWERPC
3596      && TARGET_HARD_FLOAT && TARGET_FPRS
3597      && GET_CODE (operands[0]) == MEM)
3598    {
3599      int regnum;
3600
3601      if (reload_in_progress || reload_completed)
3602	regnum = true_regnum (operands[1]);
3603      else if (GET_CODE (operands[1]) == REG)
3604	regnum = REGNO (operands[1]);
3605      else
3606	regnum = -1;
3607
3608      /* If operands[1] is a register, on POWER it may have
3609	 double-precision data in it, so truncate it to single
3610	 precision.  */
3611      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3612	{
3613	  rtx newreg;
3614	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3615	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3616	  operands[1] = newreg;
3617	}
3618    }
3619
3620  /* Recognize the case where operand[1] is a reference to thread-local
3621     data and load its address to a register.  */
3622  if (GET_CODE (operands[1]) == SYMBOL_REF)
3623    {
3624      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3625      if (model != 0)
3626	operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3627    }
3628
3629  /* Handle the case where reload calls us with an invalid address.  */
3630  if (reload_in_progress && mode == Pmode
3631      && (! general_operand (operands[1], mode)
3632	  || ! nonimmediate_operand (operands[0], mode)))
3633    goto emit_set;
3634
3635  /* Handle the case of CONSTANT_P_RTX.  */
3636  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3637    goto emit_set;
3638
3639  /* 128-bit constant floating-point values on Darwin should really be
3640     loaded as two parts.  */
3641  if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3642      && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3643      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3644    {
3645      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3646	 know how to get a DFmode SUBREG of a TFmode.  */
3647      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3648			simplify_gen_subreg (DImode, operands[1], mode, 0),
3649			DImode);
3650      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3651					     GET_MODE_SIZE (DImode)),
3652			simplify_gen_subreg (DImode, operands[1], mode,
3653					     GET_MODE_SIZE (DImode)),
3654			DImode);
3655      return;
3656    }
3657
3658  /* FIXME:  In the long term, this switch statement should go away
3659     and be replaced by a sequence of tests based on things like
3660     mode == Pmode.  */
3661  switch (mode)
3662    {
3663    case HImode:
3664    case QImode:
3665      if (CONSTANT_P (operands[1])
3666	  && GET_CODE (operands[1]) != CONST_INT)
3667	operands[1] = force_const_mem (mode, operands[1]);
3668      break;
3669
3670    case TFmode:
3671    case DFmode:
3672    case SFmode:
3673      if (CONSTANT_P (operands[1])
3674	  && ! easy_fp_constant (operands[1], mode))
3675	operands[1] = force_const_mem (mode, operands[1]);
3676      break;
3677
3678    case V16QImode:
3679    case V8HImode:
3680    case V4SFmode:
3681    case V4SImode:
3682    case V4HImode:
3683    case V2SFmode:
3684    case V2SImode:
3685    case V1DImode:
3686      if (CONSTANT_P (operands[1])
3687	  && !easy_vector_constant (operands[1], mode))
3688	operands[1] = force_const_mem (mode, operands[1]);
3689      break;
3690
3691    case SImode:
3692    case DImode:
3693      /* Use default pattern for address of ELF small data */
3694      if (TARGET_ELF
3695	  && mode == Pmode
3696	  && DEFAULT_ABI == ABI_V4
3697	  && (GET_CODE (operands[1]) == SYMBOL_REF
3698	      || GET_CODE (operands[1]) == CONST)
3699	  && small_data_operand (operands[1], mode))
3700	{
3701	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3702	  return;
3703	}
3704
3705      if (DEFAULT_ABI == ABI_V4
3706	  && mode == Pmode && mode == SImode
3707	  && flag_pic == 1 && got_operand (operands[1], mode))
3708	{
3709	  emit_insn (gen_movsi_got (operands[0], operands[1]));
3710	  return;
3711	}
3712
3713      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3714	  && TARGET_NO_TOC
3715	  && ! flag_pic
3716	  && mode == Pmode
3717	  && CONSTANT_P (operands[1])
3718	  && GET_CODE (operands[1]) != HIGH
3719	  && GET_CODE (operands[1]) != CONST_INT)
3720	{
3721	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3722
3723	  /* If this is a function address on -mcall-aixdesc,
3724	     convert it to the address of the descriptor.  */
3725	  if (DEFAULT_ABI == ABI_AIX
3726	      && GET_CODE (operands[1]) == SYMBOL_REF
3727	      && XSTR (operands[1], 0)[0] == '.')
3728	    {
3729	      const char *name = XSTR (operands[1], 0);
3730	      rtx new_ref;
3731	      while (*name == '.')
3732		name++;
3733	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3734	      CONSTANT_POOL_ADDRESS_P (new_ref)
3735		= CONSTANT_POOL_ADDRESS_P (operands[1]);
3736	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3737	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3738	      SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3739	      operands[1] = new_ref;
3740	    }
3741
3742	  if (DEFAULT_ABI == ABI_DARWIN)
3743	    {
3744#if TARGET_MACHO
3745	      if (MACHO_DYNAMIC_NO_PIC_P)
3746		{
3747		  /* Take care of any required data indirection.  */
3748		  operands[1] = rs6000_machopic_legitimize_pic_address (
3749				  operands[1], mode, operands[0]);
3750		  if (operands[0] != operands[1])
3751		    emit_insn (gen_rtx_SET (VOIDmode,
3752				            operands[0], operands[1]));
3753		  return;
3754		}
3755#endif
3756	      emit_insn (gen_macho_high (target, operands[1]));
3757	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
3758	      return;
3759	    }
3760
3761	  emit_insn (gen_elf_high (target, operands[1]));
3762	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
3763	  return;
3764	}
3765
3766      /* If this is a SYMBOL_REF that refers to a constant pool entry,
3767	 and we have put it in the TOC, we just need to make a TOC-relative
3768	 reference to it.  */
3769      if (TARGET_TOC
3770	  && GET_CODE (operands[1]) == SYMBOL_REF
3771	  && constant_pool_expr_p (operands[1])
3772	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3773					      get_pool_mode (operands[1])))
3774	{
3775	  operands[1] = create_TOC_reference (operands[1]);
3776	}
3777      else if (mode == Pmode
3778	       && CONSTANT_P (operands[1])
3779	       && ((GET_CODE (operands[1]) != CONST_INT
3780		    && ! easy_fp_constant (operands[1], mode))
3781		   || (GET_CODE (operands[1]) == CONST_INT
3782		       && num_insns_constant (operands[1], mode) > 2)
3783		   || (GET_CODE (operands[0]) == REG
3784		       && FP_REGNO_P (REGNO (operands[0]))))
3785	       && GET_CODE (operands[1]) != HIGH
3786	       && ! legitimate_constant_pool_address_p (operands[1])
3787	       && ! toc_relative_expr_p (operands[1]))
3788	{
3789	  /* Emit a USE operation so that the constant isn't deleted if
3790	     expensive optimizations are turned on because nobody
3791	     references it.  This should only be done for operands that
3792	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3793	     This should not be done for operands that contain LABEL_REFs.
3794	     For now, we just handle the obvious case.  */
3795	  if (GET_CODE (operands[1]) != LABEL_REF)
3796	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3797
3798#if TARGET_MACHO
3799	  /* Darwin uses a special PIC legitimizer.  */
3800	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3801	    {
3802	      operands[1] =
3803		rs6000_machopic_legitimize_pic_address (operands[1], mode,
3804							operands[0]);
3805	      if (operands[0] != operands[1])
3806		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3807	      return;
3808	    }
3809#endif
3810
3811	  /* If we are to limit the number of things we put in the TOC and
3812	     this is a symbol plus a constant we can add in one insn,
3813	     just put the symbol in the TOC and add the constant.  Don't do
3814	     this if reload is in progress.  */
3815	  if (GET_CODE (operands[1]) == CONST
3816	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3817	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
3818	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3819	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3820		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3821	      && ! side_effects_p (operands[0]))
3822	    {
3823	      rtx sym =
3824		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3825	      rtx other = XEXP (XEXP (operands[1], 0), 1);
3826
3827	      sym = force_reg (mode, sym);
3828	      if (mode == SImode)
3829		emit_insn (gen_addsi3 (operands[0], sym, other));
3830	      else
3831		emit_insn (gen_adddi3 (operands[0], sym, other));
3832	      return;
3833	    }
3834
3835	  operands[1] = force_const_mem (mode, operands[1]);
3836
3837	  if (TARGET_TOC
3838	      && constant_pool_expr_p (XEXP (operands[1], 0))
3839	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3840			get_pool_constant (XEXP (operands[1], 0)),
3841			get_pool_mode (XEXP (operands[1], 0))))
3842	    {
3843	      operands[1]
3844		= gen_rtx_MEM (mode,
3845			       create_TOC_reference (XEXP (operands[1], 0)));
3846	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
3847	      RTX_UNCHANGING_P (operands[1]) = 1;
3848	    }
3849	}
3850      break;
3851
3852    case TImode:
3853      if (GET_CODE (operands[0]) == MEM
3854	  && GET_CODE (XEXP (operands[0], 0)) != REG
3855	  && ! reload_in_progress)
3856	operands[0]
3857	  = replace_equiv_address (operands[0],
3858				   copy_addr_to_reg (XEXP (operands[0], 0)));
3859
3860      if (GET_CODE (operands[1]) == MEM
3861	  && GET_CODE (XEXP (operands[1], 0)) != REG
3862	  && ! reload_in_progress)
3863	operands[1]
3864	  = replace_equiv_address (operands[1],
3865				   copy_addr_to_reg (XEXP (operands[1], 0)));
3866      if (TARGET_POWER)
3867	{
3868	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
3869		       gen_rtvec (2,
3870				  gen_rtx_SET (VOIDmode,
3871					       operands[0], operands[1]),
3872				  gen_rtx_CLOBBER (VOIDmode,
3873						   gen_rtx_SCRATCH (SImode)))));
3874	  return;
3875	}
3876      break;
3877
3878    default:
3879      abort ();
3880    }
3881
3882  /* Above, we may have called force_const_mem which may have returned
3883     an invalid address.  If we can, fix this up; otherwise, reload will
3884     have to deal with it.  */
3885  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3886    operands[1] = validize_mem (operands[1]);
3887
3888 emit_set:
3889  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3890}
3891
/* Nonzero if we can use a floating-point register to pass this arg.
   TYPE is accepted but currently unused; the decision depends only on
   the mode class, on an FP argument register still being available,
   and on hardware FP actually being enabled.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.
   NAMED must be nonzero: unnamed (variadic) vector arguments are not
   passed in vector registers (see function_arg).  TYPE is unused.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
3904
3905/* Return a nonzero value to say to return the function value in
3906   memory, just as large structures are always returned.  TYPE will be
3907   the data type of the value, and FNTYPE will be the type of the
3908   function doing the returning, or @code{NULL} for libcalls.
3909
3910   The AIX ABI for the RS/6000 specifies that all structures are
3911   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
3912   specifies that structures <= 8 bytes are returned in r3/r4, but a
3913   draft put them in memory, and GCC used to implement the draft
3914   instead of the final standard.  Therefore, TARGET_AIX_STRUCT_RET
3915   controls this instead of DEFAULT_ABI; V.4 targets needing backward
3916   compatibility can change DRAFT_V4_STRUCT_RET to override the
3917   default, and -m switches get the final word.  See
3918   rs6000_override_options for more details.
3919
3920   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3921   long double support is enabled.  These values are returned in memory.
3922
3923   int_size_in_bytes returns -1 for variable size objects, which go in
3924   memory always.  The cast to unsigned makes -1 > 8.  */
3925
3926static bool
3927rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3928{
3929  if (AGGREGATE_TYPE_P (type)
3930      && (TARGET_AIX_STRUCT_RET
3931	  || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3932    return true;
3933  if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3934    return true;
3935  return false;
3936}
3937
3938/* Initialize a variable CUM of type CUMULATIVE_ARGS
3939   for a call to a function whose data type is FNTYPE.
3940   For a library call, FNTYPE is 0.
3941
3942   For incoming args we set the number of arguments in the prototype large
3943   so we never return a PARALLEL.  */
3944
3945void
3946init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3947		      rtx libname ATTRIBUTE_UNUSED, int incoming,
3948		      int libcall, int n_named_args)
3949{
3950  static CUMULATIVE_ARGS zero_cumulative;
3951
3952  *cum = zero_cumulative;
3953  cum->words = 0;
3954  cum->fregno = FP_ARG_MIN_REG;
3955  cum->vregno = ALTIVEC_ARG_MIN_REG;
3956  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3957  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3958		      ? CALL_LIBCALL : CALL_NORMAL);
3959  cum->sysv_gregno = GP_ARG_MIN_REG;
3960  cum->stdarg = fntype
3961    && (TYPE_ARG_TYPES (fntype) != 0
3962	&& (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
3963	    != void_type_node));
3964
3965  cum->nargs_prototype = 0;
3966  if (incoming || cum->prototype)
3967    cum->nargs_prototype = n_named_args;
3968
3969  /* Check for a longcall attribute.  */
3970  if ((!fntype && rs6000_default_long_calls)
3971      || (fntype
3972	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3973	  && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
3974    cum->call_cookie |= CALL_LONG;
3975
3976  if (TARGET_DEBUG_ARG)
3977    {
3978      fprintf (stderr, "\ninit_cumulative_args:");
3979      if (fntype)
3980	{
3981	  tree ret_type = TREE_TYPE (fntype);
3982	  fprintf (stderr, " ret code = %s,",
3983		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
3984	}
3985
3986      if (cum->call_cookie & CALL_LONG)
3987	fprintf (stderr, " longcall,");
3988
3989      fprintf (stderr, " proto = %d, nargs = %d\n",
3990	       cum->prototype, cum->nargs_prototype);
3991    }
3992
3993    if (fntype
3994	&& !TARGET_ALTIVEC
3995	&& TARGET_ALTIVEC_ABI
3996        && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3997      {
3998	error ("Cannot return value in vector register because"
3999	       " altivec instructions are disabled, use -maltivec"
4000	       " to enable them.");
4001      }
4002}
4003
4004/* If defined, a C expression which determines whether, and in which
4005   direction, to pad out an argument with extra space.  The value
4006   should be of type `enum direction': either `upward' to pad above
4007   the argument, `downward' to pad below, or `none' to inhibit
4008   padding.
4009
4010   For the AIX ABI structs are always stored left shifted in their
4011   argument slot.  */
4012
4013enum direction
4014function_arg_padding (enum machine_mode mode, tree type)
4015{
4016#ifndef AGGREGATE_PADDING_FIXED
4017#define AGGREGATE_PADDING_FIXED 0
4018#endif
4019#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4020#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4021#endif
4022
4023  if (!AGGREGATE_PADDING_FIXED)
4024    {
4025      /* GCC used to pass structures of the same size as integer types as
4026	 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4027	 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4028	 passed padded downward, except that -mstrict-align further
4029	 muddied the water in that multi-component structures of 2 and 4
4030	 bytes in size were passed padded upward.
4031
4032	 The following arranges for best compatibility with previous
4033	 versions of gcc, but removes the -mstrict-align dependency.  */
4034      if (BYTES_BIG_ENDIAN)
4035	{
4036	  HOST_WIDE_INT size = 0;
4037
4038	  if (mode == BLKmode)
4039	    {
4040	      if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4041		size = int_size_in_bytes (type);
4042	    }
4043	  else
4044	    size = GET_MODE_SIZE (mode);
4045
4046	  if (size == 1 || size == 2 || size == 4)
4047	    return downward;
4048	}
4049      return upward;
4050    }
4051
4052  if (AGGREGATES_PAD_UPWARD_ALWAYS)
4053    {
4054      if (type != 0 && AGGREGATE_TYPE_P (type))
4055	return upward;
4056    }
4057
4058  /* Fall back to the default.  */
4059  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4060}
4061
4062/* If defined, a C expression that gives the alignment boundary, in bits,
4063   of an argument with the specified mode and type.  If it is not defined,
4064   PARM_BOUNDARY is used for all arguments.
4065
4066   V.4 wants long longs to be double word aligned.  */
4067
4068int
4069function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4070{
4071  if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4072    return 64;
4073  else if (SPE_VECTOR_MODE (mode))
4074    return 64;
4075  else if (ALTIVEC_VECTOR_MODE (mode))
4076    return 128;
4077  else
4078    return PARM_BOUNDARY;
4079}
4080
4081/* Compute the size (in words) of a function argument.  */
4082
4083static unsigned long
4084rs6000_arg_size (enum machine_mode mode, tree type)
4085{
4086  unsigned long size;
4087
4088  if (mode != BLKmode)
4089    size = GET_MODE_SIZE (mode);
4090  else
4091    size = int_size_in_bytes (type);
4092
4093  if (TARGET_32BIT)
4094    return (size + 3) >> 2;
4095  else
4096    return (size + 7) >> 3;
4097}
4098
4099/* Update the data in CUM to advance over an argument
4100   of mode MODE and data type TYPE.
4101   (TYPE is null for libcalls where that information may not be available.)
4102
4103   Note that for args passed by reference, function_arg will be called
4104   with MODE and TYPE set to that of the pointer to the arg, not the arg
4105   itself.  */
4106
4107void
4108function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4109		      tree type, int named)
4110{
4111  cum->nargs_prototype--;
4112
4113  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4114    {
4115      bool stack = false;
4116
4117      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4118        {
4119	  cum->vregno++;
4120	  if (!TARGET_ALTIVEC)
4121	    error ("Cannot pass argument in vector register because"
4122		   " altivec instructions are disabled, use -maltivec"
4123		   " to enable them.");
4124
4125	  /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4126	     even if it is going to be passed in a vector register.
4127	     Darwin does the same for variable-argument functions.  */
4128	  if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4129	      || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4130	    stack = true;
4131	}
4132      else
4133	stack = true;
4134
4135      if (stack)
4136        {
4137	  int align;
4138
4139	  /* Vector parameters must be 16-byte aligned.  This places
4140	     them at 2 mod 4 in terms of words in 32-bit mode, since
4141	     the parameter save area starts at offset 24 from the
4142	     stack.  In 64-bit mode, they just have to start on an
4143	     even word, since the parameter save area is 16-byte
4144	     aligned.  Space for GPRs is reserved even if the argument
4145	     will be passed in memory.  */
4146	  if (TARGET_32BIT)
4147	    align = (2 - cum->words) & 3;
4148	  else
4149	    align = cum->words & 1;
4150	  cum->words += align + rs6000_arg_size (mode, type);
4151
4152	  if (TARGET_DEBUG_ARG)
4153	    {
4154	      fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4155		       cum->words, align);
4156	      fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4157		       cum->nargs_prototype, cum->prototype,
4158		       GET_MODE_NAME (mode));
4159	    }
4160	}
4161    }
4162  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4163	   && !cum->stdarg
4164	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
4165    cum->sysv_gregno++;
4166  else if (DEFAULT_ABI == ABI_V4)
4167    {
4168      if (TARGET_HARD_FLOAT && TARGET_FPRS
4169	  && (mode == SFmode || mode == DFmode))
4170	{
4171	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
4172	    cum->fregno++;
4173	  else
4174	    {
4175	      if (mode == DFmode)
4176	        cum->words += cum->words & 1;
4177	      cum->words += rs6000_arg_size (mode, type);
4178	    }
4179	}
4180      else
4181	{
4182	  int n_words = rs6000_arg_size (mode, type);
4183	  int gregno = cum->sysv_gregno;
4184
4185	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4186	     (r7,r8) or (r9,r10).  As does any other 2 word item such
4187	     as complex int due to a historical mistake.  */
4188	  if (n_words == 2)
4189	    gregno += (1 - gregno) & 1;
4190
4191	  /* Multi-reg args are not split between registers and stack.  */
4192	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4193	    {
4194	      /* Long long and SPE vectors are aligned on the stack.
4195		 So are other 2 word items such as complex int due to
4196		 a historical mistake.  */
4197	      if (n_words == 2)
4198		cum->words += cum->words & 1;
4199	      cum->words += n_words;
4200	    }
4201
4202	  /* Note: continuing to accumulate gregno past when we've started
4203	     spilling to the stack indicates the fact that we've started
4204	     spilling to the stack to expand_builtin_saveregs.  */
4205	  cum->sysv_gregno = gregno + n_words;
4206	}
4207
4208      if (TARGET_DEBUG_ARG)
4209	{
4210	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4211		   cum->words, cum->fregno);
4212	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4213		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4214	  fprintf (stderr, "mode = %4s, named = %d\n",
4215		   GET_MODE_NAME (mode), named);
4216	}
4217    }
4218  else
4219    {
4220      int n_words = rs6000_arg_size (mode, type);
4221      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4222
4223      /* The simple alignment calculation here works because
4224	 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4225	 If we ever want to handle alignments larger than 8 bytes for
4226	 32-bit or 16 bytes for 64-bit, then we'll need to take into
4227	 account the offset to the start of the parm save area.  */
4228      align &= cum->words;
4229      cum->words += align + n_words;
4230
4231      if (GET_MODE_CLASS (mode) == MODE_FLOAT
4232	  && TARGET_HARD_FLOAT && TARGET_FPRS)
4233	cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4234
4235      if (TARGET_DEBUG_ARG)
4236	{
4237	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4238		   cum->words, cum->fregno);
4239	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4240		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4241	  fprintf (stderr, "named = %d, align = %d\n", named, align);
4242	}
4243    }
4244}
4245
4246/* Determine where to put a SIMD argument on the SPE.  */
4247
4248static rtx
4249rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4250			 tree type)
4251{
4252  if (cum->stdarg)
4253    {
4254      int gregno = cum->sysv_gregno;
4255      int n_words = rs6000_arg_size (mode, type);
4256
4257      /* SPE vectors are put in odd registers.  */
4258      if (n_words == 2 && (gregno & 1) == 0)
4259	gregno += 1;
4260
4261      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4262	{
4263	  rtx r1, r2;
4264	  enum machine_mode m = SImode;
4265
4266	  r1 = gen_rtx_REG (m, gregno);
4267	  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4268	  r2 = gen_rtx_REG (m, gregno + 1);
4269	  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4270	  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4271	}
4272      else
4273	return NULL_RTX;
4274    }
4275  else
4276    {
4277      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4278	return gen_rtx_REG (mode, cum->sysv_gregno);
4279      else
4280	return NULL_RTX;
4281    }
4282}
4283
4284/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */
4285
4286static rtx
4287rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4288{
4289  int n_units;
4290  int i, k;
4291  rtx rvec[GP_ARG_NUM_REG + 1];
4292
4293  if (align_words >= GP_ARG_NUM_REG)
4294    return NULL_RTX;
4295
4296  n_units = rs6000_arg_size (mode, type);
4297
4298  /* Optimize the simple case where the arg fits in one gpr, except in
4299     the case of BLKmode due to assign_parms assuming that registers are
4300     BITS_PER_WORD wide.  */
4301  if (n_units == 0
4302      || (n_units == 1 && mode != BLKmode))
4303    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4304
4305  k = 0;
4306  if (align_words + n_units > GP_ARG_NUM_REG)
4307    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
4308       using a magic NULL_RTX component.
4309       FIXME: This is not strictly correct.  Only some of the arg
4310       belongs in memory, not all of it.  However, there isn't any way
4311       to do this currently, apart from building rtx descriptions for
4312       the pieces of memory we want stored.  Due to bugs in the generic
4313       code we can't use the normal function_arg_partial_nregs scheme
4314       with the PARALLEL arg description we emit here.
4315       In any case, the code to store the whole arg to memory is often
4316       more efficient than code to store pieces, and we know that space
4317       is available in the right place for the whole arg.  */
4318    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4319
4320  i = 0;
4321  do
4322    {
4323      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4324      rtx off = GEN_INT (i++ * 4);
4325      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4326    }
4327  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4328
4329  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4330}
4331
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when CALL_LIBCALL is set) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
	      tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    /* fregno still at its minimum means no FP arg consumed a
	       register, so tell the callee to clear the CR1 bit.  */
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* NOTE: the `else' below belongs to the inner `if' (dangling else);
     the `else if' chain after it continues the outer dispatch.  */
  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
       /* Vector parameters get passed in vector register
          and also in GPRs or memory, in absence of prototype.  */
       int align_words;
       rtx slot;
       /* Round the word count up to an even word (16-byte alignment
	  in the 64-bit parameter save area).  */
       align_words = (cum->words + 1) & ~1;

       if (align_words >= GP_ARG_NUM_REG)
         {
           slot = NULL_RTX;
         }
       else
         {
           slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
         }
       /* First element: the GPR/memory copy; second: the VR copy.  */
       return gen_rtx_PARALLEL (mode,
                gen_rtvec (2,
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              slot, const0_rtx),
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              gen_rtx_REG (mode, cum->vregno),
                                              const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named || abi == ABI_V4)
	return NULL_RTX;
      else
	{
	  /* Vector parameters to varargs functions under AIX or Darwin
	     get passed in memory and possibly also in GPRs.  */
	  int align, align_words, n_words;
	  enum machine_mode part_mode;

	  /* Vector parameters must be 16-byte aligned.  This places them at
	     2 mod 4 in terms of words in 32-bit mode, since the parameter
	     save area starts at offset 24 from the stack.  In 64-bit mode,
	     they just have to start on an even word, since the parameter
	     save area is 16-byte aligned.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  align_words = cum->words + align;

	  /* Out of registers?  Memory, then.  */
	  if (align_words >= GP_ARG_NUM_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  /* The vector value goes in GPRs.  Only the part of the
	     value in GPRs is reported here.  */
	  part_mode = mode;
	  n_words = rs6000_arg_size (mode, type);
	  if (align_words + n_words > GP_ARG_NUM_REG)
	    /* Fortunately, there are only two possibilities, the value
	       is either wholly in GPRs or half in GPRs and half not.  */
	    part_mode = DImode;

	  return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
	}
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return rs6000_spe_function_arg (cum, mode, type);
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL_RTX;
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type,
					      gregno - GP_ARG_MIN_REG);
	  return gen_rtx_REG (mode, gregno);
	}
    }
  else
    {
      /* AIX / Darwin ABIs.  */
      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
      int align_words = cum->words + (cum->words & align);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
	{
	  rtx rvec[GP_ARG_NUM_REG + 1];
	  rtx r;
	  int k;
	  bool needs_psave;
	  enum machine_mode fmode = mode;
	  unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

	  if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
	    {
	      /* Currently, we only ever need one reg here because complex
		 doubles are split.  */
	      if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
		abort ();

	      /* Long double split over regs and memory.  */
	      fmode = DFmode;
	    }

	  /* Do we also need to pass this arg in the parameter save
	     area?  */
	  needs_psave = (type
			 && (cum->nargs_prototype <= 0
			     || (DEFAULT_ABI == ABI_AIX
				 && TARGET_XL_COMPAT
				 && align_words >= GP_ARG_NUM_REG)));

	  if (!needs_psave && mode == fmode)
	    return gen_rtx_REG (fmode, cum->fregno);

	  k = 0;
	  if (needs_psave)
	    {
	      /* Describe the part that goes in gprs or the stack.
		 This piece must come first, before the fprs.  */
	      if (align_words < GP_ARG_NUM_REG)
		{
		  unsigned long n_words = rs6000_arg_size (mode, type);

		  if (align_words + n_words > GP_ARG_NUM_REG
		      || (TARGET_32BIT && TARGET_POWERPC64))
		    {
		      /* If this is partially on the stack, then we only
			 include the portion actually in registers here.  */
		      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
		      rtx off;
		      do
			{
			  r = gen_rtx_REG (rmode,
					   GP_ARG_MIN_REG + align_words);
			  off = GEN_INT (k * GET_MODE_SIZE (rmode));
			  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
			}
		      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
		    }
		  else
		    {
		      /* The whole arg fits in gprs.  */
		      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
		      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
		    }
		}
	      else
		/* It's entirely in memory.  */
		rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
	    }

	  /* Describe where this piece goes in the fprs.  */
	  r = gen_rtx_REG (fmode, cum->fregno);
	  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

	  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
	}
      else if (align_words < GP_ARG_NUM_REG)
	{
	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
	}
      else
	return NULL_RTX;
    }
}
4580
/* For an arg passed partly in registers and partly in memory, this is
   the number of registers used.  For args passed entirely in registers
   or entirely in memory, zero.  When an arg is described by a PARALLEL,
   perhaps using more than one register type, this function returns the
   number of registers used by the first element of the PARALLEL.  */

int
function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			    tree type, int named)
{
  int ret = 0;
  int align;
  int parm_offset;
  int align_words;

  /* V.4 never splits an arg between registers and memory (see
     function_arg, which returns NULL_RTX for args that don't fit).  */
  if (DEFAULT_ABI == ABI_V4)
    return 0;

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  /* parm_offset 2 mirrors the 24-byte offset of the 32-bit parameter
     save area when computing 16-byte alignment in words.  */
  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
  parm_offset = TARGET_32BIT ? 2 : 0;
  align_words = cum->words + ((parm_offset - cum->words) & align);

  if (USE_FP_FOR_ARG_P (cum, mode, type)
      /* If we are passing this arg in gprs as well, then this function
	 should return the number of gprs (or memory) partially passed,
	 *not* the number of fprs.  */
      && !(type
	   && (cum->nargs_prototype <= 0
	       || (DEFAULT_ABI == ABI_AIX
		   && TARGET_XL_COMPAT
		   && align_words >= GP_ARG_NUM_REG))))
    {
      /* Arg straddles the last FPR: the FPR part count is provisional
	 and may be replaced by the GPR count computed below.  */
      if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
	ret = FP_ARG_MAX_REG + 1 - cum->fregno;
      else if (cum->nargs_prototype >= 0)
	return 0;
    }

  /* Arg straddles the last GPR: count the words still in registers.  */
  if (align_words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
    ret = GP_ARG_NUM_REG - align_words;

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);

  return ret;
}
4632
4633/* A C expression that indicates when an argument must be passed by
4634   reference.  If nonzero for an argument, a copy of that argument is
4635   made in memory and a pointer to the argument is passed instead of
4636   the argument itself.  The pointer is passed in whatever way is
4637   appropriate for passing a pointer to that type.
4638
4639   Under V.4, aggregates and long double are passed by reference.
4640
4641   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
4642   reference unless the AltiVec vector extension ABI is in force.
4643
4644   As an extension to all ABIs, variable sized types are passed by
4645   reference.  */
4646
4647int
4648function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4649				enum machine_mode mode ATTRIBUTE_UNUSED,
4650				tree type, int named ATTRIBUTE_UNUSED)
4651{
4652  if ((DEFAULT_ABI == ABI_V4
4653       && ((type && AGGREGATE_TYPE_P (type))
4654	   || mode == TFmode))
4655      || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4656      || (type && int_size_in_bytes (type) < 0))
4657    {
4658      if (TARGET_DEBUG_ARG)
4659	fprintf (stderr, "function_arg_pass_by_reference\n");
4660
4661      return 1;
4662    }
4663  return 0;
4664}
4665
4666static void
4667rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4668{
4669  int i;
4670  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4671
4672  if (nregs == 0)
4673    return;
4674
4675    for (i = 0; i < nregs; i++)
4676    {
4677      rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4678      if (reload_completed)
4679      {
4680	if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4681	  tem = NULL_RTX;
4682	else
4683	  tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4684				     i * GET_MODE_SIZE(reg_mode));
4685      }
4686      else
4687	tem = replace_equiv_address (tem, XEXP (tem, 0));
4688
4689      if (tem == NULL_RTX)
4690        abort ();
4691
4692      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4693    }
4694}
4695
4696
4697/* Perform any needed actions needed for a function that is receiving a
4698   variable number of arguments.
4699
4700   CUM is as above.
4701
4702   MODE and TYPE are the mode and type of the current parameter.
4703
4704   PRETEND_SIZE is a variable that should be set to the amount of stack
4705   that must be pushed by the prolog to pretend that our caller pushed
4706   it.
4707
4708   Normally, this macro will push all remaining incoming registers on the
4709   stack and set PRETEND_SIZE to the length of the registers pushed.  */
4710
4711static void
4712setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4713		tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4714{
4715  CUMULATIVE_ARGS next_cum;
4716  int reg_size = TARGET_32BIT ? 4 : 8;
4717  rtx save_area = NULL_RTX, mem;
4718  int first_reg_offset, set;
4719
4720  /* Skip the last named argument.  */
4721  next_cum = *cum;
4722  function_arg_advance (&next_cum, mode, type, 1);
4723
4724  if (DEFAULT_ABI == ABI_V4)
4725    {
4726      /* Indicate to allocate space on the stack for varargs save area.  */
4727      cfun->machine->sysv_varargs_p = 1;
4728      if (! no_rtl)
4729	save_area = plus_constant (virtual_stack_vars_rtx,
4730				   - RS6000_VARARGS_SIZE);
4731
4732      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4733    }
4734  else
4735    {
4736      first_reg_offset = next_cum.words;
4737      save_area = virtual_incoming_args_rtx;
4738      cfun->machine->sysv_varargs_p = 0;
4739
4740      if (MUST_PASS_IN_STACK (mode, type))
4741	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4742    }
4743
4744  set = get_varargs_alias_set ();
4745  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4746    {
4747      mem = gen_rtx_MEM (BLKmode,
4748		         plus_constant (save_area,
4749					first_reg_offset * reg_size)),
4750      set_mem_alias_set (mem, set);
4751      set_mem_align (mem, BITS_PER_WORD);
4752
4753      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4754			          GP_ARG_NUM_REG - first_reg_offset);
4755    }
4756
4757  /* Save FP registers if needed.  */
4758  if (DEFAULT_ABI == ABI_V4
4759      && TARGET_HARD_FLOAT && TARGET_FPRS
4760      && ! no_rtl
4761      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4762    {
4763      int fregno = next_cum.fregno;
4764      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4765      rtx lab = gen_label_rtx ();
4766      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4767
4768      emit_jump_insn (gen_rtx_SET (VOIDmode,
4769				   pc_rtx,
4770				   gen_rtx_IF_THEN_ELSE (VOIDmode,
4771					    gen_rtx_NE (VOIDmode, cr1,
4772						        const0_rtx),
4773					    gen_rtx_LABEL_REF (VOIDmode, lab),
4774					    pc_rtx)));
4775
4776      while (fregno <= FP_ARG_V4_MAX_REG)
4777	{
4778	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4779          set_mem_alias_set (mem, set);
4780	  set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
4781	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4782	  fregno++;
4783	  off += 8;
4784	}
4785
4786      emit_label (lab);
4787    }
4788}
4789
4790/* Create the va_list data type.  */
4791
4792static tree
4793rs6000_build_builtin_va_list (void)
4794{
4795  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4796
4797  /* For AIX, prefer 'char *' because that's what the system
4798     header files like.  */
4799  if (DEFAULT_ABI != ABI_V4)
4800    return build_pointer_type (char_type_node);
4801
4802  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4803  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4804
4805  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4806		      unsigned_char_type_node);
4807  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4808		      unsigned_char_type_node);
4809  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4810     every user file.  */
4811  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4812		      short_unsigned_type_node);
4813  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4814		      ptr_type_node);
4815  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4816		      ptr_type_node);
4817
4818  DECL_FIELD_CONTEXT (f_gpr) = record;
4819  DECL_FIELD_CONTEXT (f_fpr) = record;
4820  DECL_FIELD_CONTEXT (f_res) = record;
4821  DECL_FIELD_CONTEXT (f_ovf) = record;
4822  DECL_FIELD_CONTEXT (f_sav) = record;
4823
4824  TREE_CHAIN (record) = type_decl;
4825  TYPE_NAME (record) = type_decl;
4826  TYPE_FIELDS (record) = f_gpr;
4827  TREE_CHAIN (f_gpr) = f_fpr;
4828  TREE_CHAIN (f_fpr) = f_res;
4829  TREE_CHAIN (f_res) = f_ovf;
4830  TREE_CHAIN (f_ovf) = f_sav;
4831
4832  layout_type (record);
4833
4834  /* The correct type is an array type of one element.  */
4835  return build_array_type (record, build_index_type (size_zero_node));
4836}
4837
4838/* Implement va_start.  */
4839
void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Pick apart the five fields of the __va_list_tag record, in the
     order rs6000_build_builtin_va_list chained them.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build references to the four fields we actually write; the
     reserved padding field is never stored to.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* valist->gpr = n_gpr;  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* valist->fpr = n_fpr;  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming-args pointer, advanced past
     any named arguments that consumed stack words.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area, which sits RS6000_VARARGS_SIZE bytes
     below the frame's virtual stack variables pointer.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
4901
4902/* Implement va_arg.  */
4903
rtx
rs6000_va_arg (tree valist, tree type)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;
  int align;

  /* Non-V.4 ABIs use the generic expander, except for the two special
     cases handled inline below.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference, as are AltiVec
	 vectors when 32-bit and not using the AltiVec ABI extension.  */
      if (int_size_in_bytes (type) < 0
	  || (TARGET_32BIT
	      && !TARGET_ALTIVEC_ABI
	      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference the fetched pointer to reach the actual
	     argument object.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      /* Complex values whose parts were split across locations: fetch
	 each part separately and repack them contiguously.  */
      if (targetm.calls.split_complex_arg
	  && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      rtx real_part, imag_part, dest_real, rr;

	      /* Recursive calls advance valist past each part.  */
	      real_part = rs6000_va_arg (valist, elem_type);
	      imag_part = rs6000_va_arg (valist, elem_type);

	      /* We're not returning the value here, but the address.
		 real_part and imag_part are not contiguous, and we know
		 there is space available to pack real_part next to
		 imag_part.  float _Complex is not promoted to
		 double _Complex by the default promotion rules that
		 promote float to double.  */
	      if (2 * elem_size > UNITS_PER_WORD)
		abort ();

	      real_part = gen_rtx_MEM (elem_mode, real_part);
	      imag_part = gen_rtx_MEM (elem_mode, imag_part);

	      /* Copy the real part into the slot just below the
		 imaginary part, via a fresh pseudo.  */
	      dest_real = adjust_address (imag_part, elem_mode, -elem_size);
	      rr = gen_reg_rtx (elem_mode);
	      emit_move_insn (rr, real_part);
	      emit_move_insn (dest_real, rr);

	      return XEXP (dest_real, 0);
	    }
	}

      return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: pick apart the __va_list_tag record fields, chained in the
     order rs6000_build_builtin_va_list created them.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size in bytes and in 4-byte words (rounded up).  */
  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;
  align = 1;

  /* Classify the argument: which register count to consume (REG,
     N_REG), where its save-area slots start (SAV_OFS) and how wide
     each slot is (SAV_SCALE), and whether what we fetch is really a
     pointer to the value (INDIRECT_P).  */
  if (AGGREGATE_TYPE_P (type)
      || TYPE_MODE (type) == TFmode
      || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
    {
      /* Aggregates, long doubles, and AltiVec vectors are passed by
	 reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = 4;
      rsize = 1;
    }
  else if (TARGET_HARD_FLOAT && TARGET_FPRS
	   && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
      if (TYPE_MODE (type) == DFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL_RTX;
  addr_rtx = gen_reg_rtx (Pmode);

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2)
	{
	  /* Round the register counter up to even before testing.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	}

      /* If not enough registers remain, jump to the overflow-area
	 path at lab_false.  */
      emit_cmp_and_jump_insns
	(expand_expr (u, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      t = sav;
      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

      /* addr = sav + sav_ofs + reg * sav_scale, consuming n_reg
	 registers via the post-increment.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      /* Register path done; skip the overflow-area path.  */
      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();

      emit_label (lab_false);
      if (n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of the n_reg == 2 case.  */
	  t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
	  TREE_SIDE_EFFECTS (t) = 1;
	  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (lab_over)
    emit_label (lab_over);

  /* For by-reference arguments, what we computed is the address of a
     pointer; load through it to get the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
5126
5127/* Builtins.  */
5128
/* Register the machine-dependent built-in function NAME with type TYPE
   and function code CODE, but only when MASK has at least one bit in
   common with the current target_flags (so e.g. AltiVec builtins only
   appear when AltiVec is enabled).  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
5135
5136/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
5137
/* Each entry gives the target_flags mask gating the builtin (see
   def_builtin), the insn code implementing it, its source-level name,
   and its builtin function code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
5164
5165/* DST operations: void foo (void *, const int, const char).  */
5166
/* Same entry layout as bdesc_3arg: { target_flags mask, insn code,
   builtin name, builtin function code }.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
5174
5175/* Simple binary operations: VECc = foo (VECa, VECb).  */
5176
5177static struct builtin_description bdesc_2arg[] =
5178{
5179  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5180  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5181  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5182  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5183  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5184  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5185  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5186  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5187  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5188  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5189  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5190  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5191  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5192  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5193  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5194  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5195  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5196  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5197  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5198  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5199  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5200  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5201  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5202  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5203  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5204  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5205  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5206  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5207  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5208  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5209  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5210  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5211  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5212  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5213  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5214  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5215  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5216  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5217  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5218  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5219  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5220  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5221  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5222  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5223  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5224  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5225  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5226  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5227  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5228  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5229  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5230  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5231  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5232  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5233  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5234  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5235  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5236  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5237  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5238  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5239  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5240  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5241  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5242  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5243  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5244  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5245  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5246  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5247  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5248  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5249  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5250  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5251  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5252  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5253  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5254  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5255  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5256  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5257  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5258  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5259  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5260  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5261  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5262  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5263  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5264  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5265  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5266  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5267  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5268  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5269  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5270  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5271  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5272  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5273  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5274  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5275  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5276  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5277  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5278  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5279  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5280  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5281  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5282  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5283  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5284  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5285  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5286  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5287  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5288  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5289  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5290  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5291  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5292
5293  /* Place holder, leave as first spe builtin.  */
5294  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5295  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5296  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5297  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5298  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5299  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5300  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5301  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5302  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5303  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5304  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5305  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5306  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5307  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5308  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5309  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5310  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5311  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5312  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5313  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5314  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5315  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5316  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5317  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5318  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5319  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5320  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5321  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5322  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5323  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5324  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5325  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5326  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5327  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5328  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5329  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5330  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5331  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5332  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5333  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5334  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5335  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5336  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5337  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5338  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5339  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5340  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5341  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5342  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5343  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5344  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5345  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5346  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5347  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5348  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5349  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5350  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5351  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5352  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5353  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5354  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5355  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5356  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5357  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5358  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5359  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5360  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5361  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5362  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5363  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5364  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5365  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5366  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5367  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5368  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5369  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5370  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5371  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5372  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5373  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5374  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5375  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5376  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5377  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5378  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5379  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5380  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5381  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5382  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5383  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5384  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5385  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5386  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5387  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5388  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5389  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5390  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5391  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5392  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5393  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5394  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5395  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5396  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5397  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5398  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5399  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5400  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5401  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5402  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5403
5404  /* SPE binary operations expecting a 5-bit unsigned literal.  */
5405  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5406
5407  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5408  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5409  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5410  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5411  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5412  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5413  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5414  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5415  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5416  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5417  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5418  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5419  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5420  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5421  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5422  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5423  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5424  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5425  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5426  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5427  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5428  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5429  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5430  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5431  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5432  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5433
5434  /* Place-holder.  Leave as last binary SPE builtin.  */
5435  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5436};
5437
5438/* AltiVec predicates.  */
5439
/* Describes one AltiVec comparison-predicate builtin.  The opcode
   string is handed to the predicate insn pattern as a SYMBOL_REF by
   altivec_expand_predicate_builtin; the fields are positional, so
   their order must match the initializers in bdesc_altivec_preds.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* MASK_* target flag gating availability.  */
  const enum insn_code icode;		/* Insn pattern used to expand the compare.  */
  const char *opcode;			/* Assembler mnemonic, passed through as a SYMBOL_REF.  */
  const char *const name;		/* User-visible __builtin_* name.  */
  const enum rs6000_builtins code;	/* Builtin function code.  */
};
5448
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  /* V4SF (float) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  /* V4SI (word) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  /* V8HI (halfword) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  /* V16QI (byte) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
5465
5466/* SPE predicates.  */
/* NOTE(review): the first and last entries appear to delimit a
   contiguous range of builtin codes that the expander iterates over —
   confirm against the table walk before reordering any row.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
5483
5484/* SPE evsel predicates.  */
/* Each entry pairs an evsel builtin with the compare pattern whose
   result selects between the two data operands.  The first and last
   rows are range delimiters — do not reorder.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
5501
5502/* ABS* operations.  */
5503
/* Absolute-value builtins, expanded via altivec_expand_abs_builtin
   (they need two scratch registers).  The "abss" variants presumably
   saturate rather than wrap on the most-negative value — confirm
   against the insn patterns.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
5514
5515/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5516   foo (VECa).  */
5517
static struct builtin_description bdesc_1arg[] =
{
  /* AltiVec unary operations.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  /* The vspltis* entries take a 5-bit signed literal; see the range
     check in rs6000_expand_unop_builtin.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
5572
5573static rtx
5574rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5575{
5576  rtx pat;
5577  tree arg0 = TREE_VALUE (arglist);
5578  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5579  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5580  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5581
5582  if (icode == CODE_FOR_nothing)
5583    /* Builtin not supported on this processor.  */
5584    return 0;
5585
5586  /* If we got invalid arguments bail out before generating bad rtl.  */
5587  if (arg0 == error_mark_node)
5588    return const0_rtx;
5589
5590  if (icode == CODE_FOR_altivec_vspltisb
5591      || icode == CODE_FOR_altivec_vspltish
5592      || icode == CODE_FOR_altivec_vspltisw
5593      || icode == CODE_FOR_spe_evsplatfi
5594      || icode == CODE_FOR_spe_evsplati)
5595    {
5596      /* Only allow 5-bit *signed* literals.  */
5597      if (GET_CODE (op0) != CONST_INT
5598	  || INTVAL (op0) > 0x1f
5599	  || INTVAL (op0) < -0x1f)
5600	{
5601	  error ("argument 1 must be a 5-bit signed literal");
5602	  return const0_rtx;
5603	}
5604    }
5605
5606  if (target == 0
5607      || GET_MODE (target) != tmode
5608      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5609    target = gen_reg_rtx (tmode);
5610
5611  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5612    op0 = copy_to_mode_reg (mode0, op0);
5613
5614  pat = GEN_FCN (icode) (target, op0);
5615  if (! pat)
5616    return 0;
5617  emit_insn (pat);
5618
5619  return target;
5620}
5621
5622static rtx
5623altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5624{
5625  rtx pat, scratch1, scratch2;
5626  tree arg0 = TREE_VALUE (arglist);
5627  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5628  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5629  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5630
5631  /* If we have invalid arguments, bail out before generating bad rtl.  */
5632  if (arg0 == error_mark_node)
5633    return const0_rtx;
5634
5635  if (target == 0
5636      || GET_MODE (target) != tmode
5637      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5638    target = gen_reg_rtx (tmode);
5639
5640  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5641    op0 = copy_to_mode_reg (mode0, op0);
5642
5643  scratch1 = gen_reg_rtx (mode0);
5644  scratch2 = gen_reg_rtx (mode0);
5645
5646  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5647  if (! pat)
5648    return 0;
5649  emit_insn (pat);
5650
5651  return target;
5652}
5653
5654static rtx
5655rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5656{
5657  rtx pat;
5658  tree arg0 = TREE_VALUE (arglist);
5659  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5660  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5661  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5662  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5663  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5664  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5665
5666  if (icode == CODE_FOR_nothing)
5667    /* Builtin not supported on this processor.  */
5668    return 0;
5669
5670  /* If we got invalid arguments bail out before generating bad rtl.  */
5671  if (arg0 == error_mark_node || arg1 == error_mark_node)
5672    return const0_rtx;
5673
5674  if (icode == CODE_FOR_altivec_vcfux
5675      || icode == CODE_FOR_altivec_vcfsx
5676      || icode == CODE_FOR_altivec_vctsxs
5677      || icode == CODE_FOR_altivec_vctuxs
5678      || icode == CODE_FOR_altivec_vspltb
5679      || icode == CODE_FOR_altivec_vsplth
5680      || icode == CODE_FOR_altivec_vspltw
5681      || icode == CODE_FOR_spe_evaddiw
5682      || icode == CODE_FOR_spe_evldd
5683      || icode == CODE_FOR_spe_evldh
5684      || icode == CODE_FOR_spe_evldw
5685      || icode == CODE_FOR_spe_evlhhesplat
5686      || icode == CODE_FOR_spe_evlhhossplat
5687      || icode == CODE_FOR_spe_evlhhousplat
5688      || icode == CODE_FOR_spe_evlwhe
5689      || icode == CODE_FOR_spe_evlwhos
5690      || icode == CODE_FOR_spe_evlwhou
5691      || icode == CODE_FOR_spe_evlwhsplat
5692      || icode == CODE_FOR_spe_evlwwsplat
5693      || icode == CODE_FOR_spe_evrlwi
5694      || icode == CODE_FOR_spe_evslwi
5695      || icode == CODE_FOR_spe_evsrwis
5696      || icode == CODE_FOR_spe_evsubifw
5697      || icode == CODE_FOR_spe_evsrwiu)
5698    {
5699      /* Only allow 5-bit unsigned literals.  */
5700      STRIP_NOPS (arg1);
5701      if (TREE_CODE (arg1) != INTEGER_CST
5702	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
5703	{
5704	  error ("argument 2 must be a 5-bit unsigned literal");
5705	  return const0_rtx;
5706	}
5707    }
5708
5709  if (target == 0
5710      || GET_MODE (target) != tmode
5711      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5712    target = gen_reg_rtx (tmode);
5713
5714  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5715    op0 = copy_to_mode_reg (mode0, op0);
5716  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5717    op1 = copy_to_mode_reg (mode1, op1);
5718
5719  pat = GEN_FCN (icode) (target, op0, op1);
5720  if (! pat)
5721    return 0;
5722  emit_insn (pat);
5723
5724  return target;
5725}
5726
5727static rtx
5728altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5729				  tree arglist, rtx target)
5730{
5731  rtx pat, scratch;
5732  tree cr6_form = TREE_VALUE (arglist);
5733  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5734  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5735  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5736  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5737  enum machine_mode tmode = SImode;
5738  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5739  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5740  int cr6_form_int;
5741
5742  if (TREE_CODE (cr6_form) != INTEGER_CST)
5743    {
5744      error ("argument 1 of __builtin_altivec_predicate must be a constant");
5745      return const0_rtx;
5746    }
5747  else
5748    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5749
5750  if (mode0 != mode1)
5751    abort ();
5752
5753  /* If we have invalid arguments, bail out before generating bad rtl.  */
5754  if (arg0 == error_mark_node || arg1 == error_mark_node)
5755    return const0_rtx;
5756
5757  if (target == 0
5758      || GET_MODE (target) != tmode
5759      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5760    target = gen_reg_rtx (tmode);
5761
5762  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5763    op0 = copy_to_mode_reg (mode0, op0);
5764  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5765    op1 = copy_to_mode_reg (mode1, op1);
5766
5767  scratch = gen_reg_rtx (mode0);
5768
5769  pat = GEN_FCN (icode) (scratch, op0, op1,
5770			 gen_rtx (SYMBOL_REF, Pmode, opcode));
5771  if (! pat)
5772    return 0;
5773  emit_insn (pat);
5774
5775  /* The vec_any* and vec_all* predicates use the same opcodes for two
5776     different operations, but the bits in CR6 will be different
5777     depending on what information we want.  So we have to play tricks
5778     with CR6 to get the right bits out.
5779
5780     If you think this is disgusting, look at the specs for the
5781     AltiVec predicates.  */
5782
5783     switch (cr6_form_int)
5784       {
5785       case 0:
5786	 emit_insn (gen_cr6_test_for_zero (target));
5787	 break;
5788       case 1:
5789	 emit_insn (gen_cr6_test_for_zero_reverse (target));
5790	 break;
5791       case 2:
5792	 emit_insn (gen_cr6_test_for_lt (target));
5793	 break;
5794       case 3:
5795	 emit_insn (gen_cr6_test_for_lt_reverse (target));
5796	 break;
5797       default:
5798	 error ("argument 1 of __builtin_altivec_predicate is out of range");
5799	 break;
5800       }
5801
5802  return target;
5803}
5804
5805static rtx
5806altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5807{
5808  rtx pat, addr;
5809  tree arg0 = TREE_VALUE (arglist);
5810  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5811  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5812  enum machine_mode mode0 = Pmode;
5813  enum machine_mode mode1 = Pmode;
5814  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5815  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5816
5817  if (icode == CODE_FOR_nothing)
5818    /* Builtin not supported on this processor.  */
5819    return 0;
5820
5821  /* If we got invalid arguments bail out before generating bad rtl.  */
5822  if (arg0 == error_mark_node || arg1 == error_mark_node)
5823    return const0_rtx;
5824
5825  if (target == 0
5826      || GET_MODE (target) != tmode
5827      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5828    target = gen_reg_rtx (tmode);
5829
5830  op1 = copy_to_mode_reg (mode1, op1);
5831
5832  if (op0 == const0_rtx)
5833    {
5834      addr = gen_rtx_MEM (tmode, op1);
5835    }
5836  else
5837    {
5838      op0 = copy_to_mode_reg (mode0, op0);
5839      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5840    }
5841
5842  pat = GEN_FCN (icode) (target, addr);
5843
5844  if (! pat)
5845    return 0;
5846  emit_insn (pat);
5847
5848  return target;
5849}
5850
/* Expand an SPE store-vector builtin.  Returns NULL_RTX always (a
   store produces no value).  Note the deliberate cross-mapping: the
   builtin's first argument (op0) becomes the insn's LAST operand and
   is validated against operand[2], while the second and third
   arguments slide down to operands 0 and 1 — presumably the patterns
   order (address, offset, value); confirm against the spe_evst*
   patterns before touching this.  */
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* op0 feeds insn operand 2, op1 operand 0, op2 operand 1 (see the
     GEN_FCN call below) — each is checked against its slot's mode.  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
5883
5884static rtx
5885altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5886{
5887  tree arg0 = TREE_VALUE (arglist);
5888  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5889  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5890  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5891  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5892  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5893  rtx pat, addr;
5894  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5895  enum machine_mode mode1 = Pmode;
5896  enum machine_mode mode2 = Pmode;
5897
5898  /* Invalid arguments.  Bail before doing anything stoopid!  */
5899  if (arg0 == error_mark_node
5900      || arg1 == error_mark_node
5901      || arg2 == error_mark_node)
5902    return const0_rtx;
5903
5904  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5905    op0 = copy_to_mode_reg (tmode, op0);
5906
5907  op2 = copy_to_mode_reg (mode2, op2);
5908
5909  if (op1 == const0_rtx)
5910    {
5911      addr = gen_rtx_MEM (tmode, op2);
5912    }
5913  else
5914    {
5915      op1 = copy_to_mode_reg (mode1, op1);
5916      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5917    }
5918
5919  pat = GEN_FCN (icode) (addr, op0);
5920  if (pat)
5921    emit_insn (pat);
5922  return NULL_RTX;
5923}
5924
5925static rtx
5926rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5927{
5928  rtx pat;
5929  tree arg0 = TREE_VALUE (arglist);
5930  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5931  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5932  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5933  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5934  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5935  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5936  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5937  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5938  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5939
5940  if (icode == CODE_FOR_nothing)
5941    /* Builtin not supported on this processor.  */
5942    return 0;
5943
5944  /* If we got invalid arguments bail out before generating bad rtl.  */
5945  if (arg0 == error_mark_node
5946      || arg1 == error_mark_node
5947      || arg2 == error_mark_node)
5948    return const0_rtx;
5949
5950  if (icode == CODE_FOR_altivec_vsldoi_4sf
5951      || icode == CODE_FOR_altivec_vsldoi_4si
5952      || icode == CODE_FOR_altivec_vsldoi_8hi
5953      || icode == CODE_FOR_altivec_vsldoi_16qi)
5954    {
5955      /* Only allow 4-bit unsigned literals.  */
5956      if (TREE_CODE (arg2) != INTEGER_CST
5957	  || TREE_INT_CST_LOW (arg2) & ~0xf)
5958	{
5959	  error ("argument 3 must be a 4-bit unsigned literal");
5960	  return const0_rtx;
5961	}
5962    }
5963
5964  if (target == 0
5965      || GET_MODE (target) != tmode
5966      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5967    target = gen_reg_rtx (tmode);
5968
5969  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5970    op0 = copy_to_mode_reg (mode0, op0);
5971  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5972    op1 = copy_to_mode_reg (mode1, op1);
5973  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5974    op2 = copy_to_mode_reg (mode2, op2);
5975
5976  pat = GEN_FCN (icode) (target, op0, op1, op2);
5977  if (! pat)
5978    return 0;
5979  emit_insn (pat);
5980
5981  return target;
5982}
5983
5984/* Expand the lvx builtins.  */
5985static rtx
5986altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5987{
5988  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5989  tree arglist = TREE_OPERAND (exp, 1);
5990  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5991  tree arg0;
5992  enum machine_mode tmode, mode0;
5993  rtx pat, op0;
5994  enum insn_code icode;
5995
5996  switch (fcode)
5997    {
5998    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5999      icode = CODE_FOR_altivec_lvx_16qi;
6000      break;
6001    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6002      icode = CODE_FOR_altivec_lvx_8hi;
6003      break;
6004    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6005      icode = CODE_FOR_altivec_lvx_4si;
6006      break;
6007    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6008      icode = CODE_FOR_altivec_lvx_4sf;
6009      break;
6010    default:
6011      *expandedp = false;
6012      return NULL_RTX;
6013    }
6014
6015  *expandedp = true;
6016
6017  arg0 = TREE_VALUE (arglist);
6018  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6019  tmode = insn_data[icode].operand[0].mode;
6020  mode0 = insn_data[icode].operand[1].mode;
6021
6022  if (target == 0
6023      || GET_MODE (target) != tmode
6024      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6025    target = gen_reg_rtx (tmode);
6026
6027  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6028    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6029
6030  pat = GEN_FCN (icode) (target, op0);
6031  if (! pat)
6032    return 0;
6033  emit_insn (pat);
6034  return target;
6035}
6036
6037/* Expand the stvx builtins.  */
6038static rtx
6039altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6040			   bool *expandedp)
6041{
6042  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6043  tree arglist = TREE_OPERAND (exp, 1);
6044  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6045  tree arg0, arg1;
6046  enum machine_mode mode0, mode1;
6047  rtx pat, op0, op1;
6048  enum insn_code icode;
6049
6050  switch (fcode)
6051    {
6052    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6053      icode = CODE_FOR_altivec_stvx_16qi;
6054      break;
6055    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6056      icode = CODE_FOR_altivec_stvx_8hi;
6057      break;
6058    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6059      icode = CODE_FOR_altivec_stvx_4si;
6060      break;
6061    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6062      icode = CODE_FOR_altivec_stvx_4sf;
6063      break;
6064    default:
6065      *expandedp = false;
6066      return NULL_RTX;
6067    }
6068
6069  arg0 = TREE_VALUE (arglist);
6070  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6071  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6072  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6073  mode0 = insn_data[icode].operand[0].mode;
6074  mode1 = insn_data[icode].operand[1].mode;
6075
6076  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6077    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6078  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6079    op1 = copy_to_mode_reg (mode1, op1);
6080
6081  pat = GEN_FCN (icode) (op0, op1);
6082  if (pat)
6083    emit_insn (pat);
6084
6085  *expandedp = true;
6086  return NULL_RTX;
6087}
6088
/* Expand the dst builtins (data-stream touch: dst/dstt/dstst/dststt).
   Scans bdesc_dst for the function code; sets *EXPANDEDP when found.
   These produce no value, so NULL_RTX is returned either way.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
			    bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	*expandedp = true;
	/* The stream-tag argument must be a literal encoded into the
	   instruction; strip conversions before checking.  */
	STRIP_NOPS (arg2);
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	/* Operand 0 is the address to prefetch; wrap it in a MEM.  */
	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	return NULL_RTX;
      }

  return NULL_RTX;
}
6149
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Try the specialized expanders first; each one sets *EXPANDEDP
     when it recognizes the function code.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume we handle it until we fall out the bottom.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Read the vector status and control register.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Write the vector status and control register.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* Stop the data stream whose 2-bit tag is the argument.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
      /* The front end funnels unmatched overloads here with the
	 intrinsic's name as a string literal; report it.  */
      arg0 = TREE_VALUE (arglist);
      while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
	arg0 = TREE_OPERAND (arg0, 0);
      error ("invalid parameter combination for `%s' AltiVec intrinsic",
	     TREE_STRING_POINTER (arg0));

      return const0_rtx;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Not one of ours after all.  */
  *expandedp = false;
  return NULL_RTX;
}
6308
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  These are the SPE
   vector load forms (indexed and offset addressing); the mask field
   is filled in later by enable_mask_for_builtins.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
6336
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The offset is the third argument in the list; the diagnostic
	 numbers it 2 (counting from zero, as the expanders do).  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* Table-driven groups: manually-initialized binops, predicates,
     and evsel variants.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Read the SPE status and control register.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write the SPE status and control register.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Not one of ours.  */
  *expandedp = false;
  return NULL_RTX;
}
6474
/* Expand an SPE predicate builtin.  The first argument FORM selects
   which variant (all/any/upper/lower) — i.e. which CR bit of the
   comparison ICODE (ARG0, ARG1) — is materialized into TARGET.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands must have the same mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Read the selected bit of the CC result back as an SImode 0/1.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
6573
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

/* Expand an evsel builtin: compare ARG0/ARG1 with ICODE, then select
   per-element between ARG2 and ARG3 based on the resulting CC.  */
static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Both compare operands must have the same mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* The operand[1] predicate is reused for all four operands; this
     relies on mode0 == mode1, which was checked above.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select the integer or float-single variant of evsel.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
6634
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  /* Give each family's special-case expander first crack.  */
  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }

  if (TARGET_ALTIVEC || TARGET_SPE)
    {
      /* Handle simple unary operations.  */
      d = (struct builtin_description *) bdesc_1arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_unop_builtin (d->icode, arglist, target);

      /* Handle simple binary operations.  */
      d = (struct builtin_description *) bdesc_2arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_binop_builtin (d->icode, arglist, target);

      /* Handle simple ternary operations.  */
      d = (struct builtin_description *) bdesc_3arg;
      for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
    }

  /* An fcode we built but cannot expand is an internal error.  */
  abort ();
  return NULL_RTX;
}
6693
/* Create the distinct AltiVec/SPE element and vector types, push
   their names into the language's namespace, and register the
   target builtins for whichever of SPE/AltiVec is enabled.  */
static void
rs6000_init_builtins (void)
{
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector
     unsigned ...' types, especially in C++ land.  Similarly, 'vector
     pixel' is distinct from 'vector unsigned short'.  */

  bool_char_type_node = copy_node (unsigned_intQI_type_node);
  TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
  bool_short_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
  bool_int_type_node = copy_node (unsigned_intSI_type_node);
  TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
  pixel_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;

  /* Make the element-type names visible to the front end.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = make_vector (V16QImode, bool_char_type_node, 1);
  bool_V8HI_type_node = make_vector (V8HImode, bool_short_type_node, 1);
  bool_V4SI_type_node = make_vector (V4SImode, bool_int_type_node, 1);
  pixel_V8HI_type_node = make_vector (V8HImode, pixel_type_node, 1);

  /* And the vector-type names.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6775
6776/* Search through a set of builtins and enable the mask bits.
6777   DESC is an array of builtins.
6778   SIZE is the total number of builtins.
6779   START is the builtin enum at which to start.
6780   END is the builtin enum at which to end.  */
6781static void
6782enable_mask_for_builtins (struct builtin_description *desc, int size,
6783			  enum rs6000_builtins start,
6784			  enum rs6000_builtins end)
6785{
6786  int i;
6787
6788  for (i = 0; i < size; ++i)
6789    if (desc[i].code == start)
6790      break;
6791
6792  if (i == size)
6793    return;
6794
6795  for (; i < size; ++i)
6796    {
6797      /* Flip all the bits on.  */
6798      desc[i].mask = target_flags;
6799      if (desc[i].code == end)
6800	break;
6801    }
6802}
6803
/* Create the built-in function declarations specific to the SPE APU
   (the __builtin_spe_* family), and enable the target_flags mask bits
   for the simple SPE builtins that are registered later in
   rs6000_common_init_builtins.  Called when TARGET_SPE is set.  */
static void
spe_init_builtins (void)
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types for the irregular builtins registered below.  Each
     argument list is a tree_cons chain terminated by ENDLINK.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_signed_char
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, signed_char_type_node,
				      endlink));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Make the opaque 64-bit SPE vector type visible to the front end.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
		 opaque_V2SI_type_node));

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      /* Select the signature from the mode of the insn's operand 1
	 (the first vector argument).  */
      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      /* As above, operand 1's mode picks the four-operand signature.  */
      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7022
/* Create the built-in function declarations specific to AltiVec:
   the internal load/store builtins, the data-stream (dst) variants,
   the vector predicates and the abs* operators.  Called when
   TARGET_ALTIVEC is set.  */
static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Pointer types used as argument types by the builtins below.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Pointer-to-const variants, used for the load builtins so they do
     not imply a write through the pointer.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function types for the builtin declarations below.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_pcchar
    = build_function_type_list (integer_type_node,
				pcchar_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* See altivec.h for usage of "__builtin_altivec_compiletime_error".  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
	       ALTIVEC_BUILTIN_COMPILETIME_ERROR);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      /* The mode of operand 1 (the first vector argument) selects
	 the predicate's signature.  */
      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      /* abs* is unary; the output mode (operand 0) determines the
	 single-argument signature.  */
      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7228
7229static void
7230rs6000_common_init_builtins (void)
7231{
7232  struct builtin_description *d;
7233  size_t i;
7234
7235  tree v4sf_ftype_v4sf_v4sf_v16qi
7236    = build_function_type_list (V4SF_type_node,
7237				V4SF_type_node, V4SF_type_node,
7238				V16QI_type_node, NULL_TREE);
7239  tree v4si_ftype_v4si_v4si_v16qi
7240    = build_function_type_list (V4SI_type_node,
7241				V4SI_type_node, V4SI_type_node,
7242				V16QI_type_node, NULL_TREE);
7243  tree v8hi_ftype_v8hi_v8hi_v16qi
7244    = build_function_type_list (V8HI_type_node,
7245				V8HI_type_node, V8HI_type_node,
7246				V16QI_type_node, NULL_TREE);
7247  tree v16qi_ftype_v16qi_v16qi_v16qi
7248    = build_function_type_list (V16QI_type_node,
7249				V16QI_type_node, V16QI_type_node,
7250				V16QI_type_node, NULL_TREE);
7251  tree v4si_ftype_int
7252    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7253  tree v8hi_ftype_int
7254    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7255  tree v16qi_ftype_int
7256    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7257  tree v8hi_ftype_v16qi
7258    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7259  tree v4sf_ftype_v4sf
7260    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7261
7262  tree v2si_ftype_v2si_v2si
7263    = build_function_type_list (opaque_V2SI_type_node,
7264				opaque_V2SI_type_node,
7265				opaque_V2SI_type_node, NULL_TREE);
7266
7267  tree v2sf_ftype_v2sf_v2sf
7268    = build_function_type_list (opaque_V2SF_type_node,
7269				opaque_V2SF_type_node,
7270				opaque_V2SF_type_node, NULL_TREE);
7271
7272  tree v2si_ftype_int_int
7273    = build_function_type_list (opaque_V2SI_type_node,
7274				integer_type_node, integer_type_node,
7275				NULL_TREE);
7276
7277  tree v2si_ftype_v2si
7278    = build_function_type_list (opaque_V2SI_type_node,
7279				opaque_V2SI_type_node, NULL_TREE);
7280
7281  tree v2sf_ftype_v2sf
7282    = build_function_type_list (opaque_V2SF_type_node,
7283				opaque_V2SF_type_node, NULL_TREE);
7284
7285  tree v2sf_ftype_v2si
7286    = build_function_type_list (opaque_V2SF_type_node,
7287				opaque_V2SI_type_node, NULL_TREE);
7288
7289  tree v2si_ftype_v2sf
7290    = build_function_type_list (opaque_V2SI_type_node,
7291				opaque_V2SF_type_node, NULL_TREE);
7292
7293  tree v2si_ftype_v2si_char
7294    = build_function_type_list (opaque_V2SI_type_node,
7295				opaque_V2SI_type_node,
7296				char_type_node, NULL_TREE);
7297
7298  tree v2si_ftype_int_char
7299    = build_function_type_list (opaque_V2SI_type_node,
7300				integer_type_node, char_type_node, NULL_TREE);
7301
7302  tree v2si_ftype_char
7303    = build_function_type_list (opaque_V2SI_type_node,
7304				char_type_node, NULL_TREE);
7305
7306  tree int_ftype_int_int
7307    = build_function_type_list (integer_type_node,
7308				integer_type_node, integer_type_node,
7309				NULL_TREE);
7310
7311  tree v4si_ftype_v4si_v4si
7312    = build_function_type_list (V4SI_type_node,
7313				V4SI_type_node, V4SI_type_node, NULL_TREE);
7314  tree v4sf_ftype_v4si_int
7315    = build_function_type_list (V4SF_type_node,
7316				V4SI_type_node, integer_type_node, NULL_TREE);
7317  tree v4si_ftype_v4sf_int
7318    = build_function_type_list (V4SI_type_node,
7319				V4SF_type_node, integer_type_node, NULL_TREE);
7320  tree v4si_ftype_v4si_int
7321    = build_function_type_list (V4SI_type_node,
7322				V4SI_type_node, integer_type_node, NULL_TREE);
7323  tree v8hi_ftype_v8hi_int
7324    = build_function_type_list (V8HI_type_node,
7325				V8HI_type_node, integer_type_node, NULL_TREE);
7326  tree v16qi_ftype_v16qi_int
7327    = build_function_type_list (V16QI_type_node,
7328				V16QI_type_node, integer_type_node, NULL_TREE);
7329  tree v16qi_ftype_v16qi_v16qi_int
7330    = build_function_type_list (V16QI_type_node,
7331				V16QI_type_node, V16QI_type_node,
7332				integer_type_node, NULL_TREE);
7333  tree v8hi_ftype_v8hi_v8hi_int
7334    = build_function_type_list (V8HI_type_node,
7335				V8HI_type_node, V8HI_type_node,
7336				integer_type_node, NULL_TREE);
7337  tree v4si_ftype_v4si_v4si_int
7338    = build_function_type_list (V4SI_type_node,
7339				V4SI_type_node, V4SI_type_node,
7340				integer_type_node, NULL_TREE);
7341  tree v4sf_ftype_v4sf_v4sf_int
7342    = build_function_type_list (V4SF_type_node,
7343				V4SF_type_node, V4SF_type_node,
7344				integer_type_node, NULL_TREE);
7345  tree v4sf_ftype_v4sf_v4sf
7346    = build_function_type_list (V4SF_type_node,
7347				V4SF_type_node, V4SF_type_node, NULL_TREE);
7348  tree v4sf_ftype_v4sf_v4sf_v4si
7349    = build_function_type_list (V4SF_type_node,
7350				V4SF_type_node, V4SF_type_node,
7351				V4SI_type_node, NULL_TREE);
7352  tree v4sf_ftype_v4sf_v4sf_v4sf
7353    = build_function_type_list (V4SF_type_node,
7354				V4SF_type_node, V4SF_type_node,
7355				V4SF_type_node, NULL_TREE);
7356  tree v4si_ftype_v4si_v4si_v4si
7357    = build_function_type_list (V4SI_type_node,
7358				V4SI_type_node, V4SI_type_node,
7359				V4SI_type_node, NULL_TREE);
7360  tree v8hi_ftype_v8hi_v8hi
7361    = build_function_type_list (V8HI_type_node,
7362				V8HI_type_node, V8HI_type_node, NULL_TREE);
7363  tree v8hi_ftype_v8hi_v8hi_v8hi
7364    = build_function_type_list (V8HI_type_node,
7365				V8HI_type_node, V8HI_type_node,
7366				V8HI_type_node, NULL_TREE);
7367 tree v4si_ftype_v8hi_v8hi_v4si
7368    = build_function_type_list (V4SI_type_node,
7369				V8HI_type_node, V8HI_type_node,
7370				V4SI_type_node, NULL_TREE);
7371 tree v4si_ftype_v16qi_v16qi_v4si
7372    = build_function_type_list (V4SI_type_node,
7373				V16QI_type_node, V16QI_type_node,
7374				V4SI_type_node, NULL_TREE);
7375  tree v16qi_ftype_v16qi_v16qi
7376    = build_function_type_list (V16QI_type_node,
7377				V16QI_type_node, V16QI_type_node, NULL_TREE);
7378  tree v4si_ftype_v4sf_v4sf
7379    = build_function_type_list (V4SI_type_node,
7380				V4SF_type_node, V4SF_type_node, NULL_TREE);
7381  tree v8hi_ftype_v16qi_v16qi
7382    = build_function_type_list (V8HI_type_node,
7383				V16QI_type_node, V16QI_type_node, NULL_TREE);
7384  tree v4si_ftype_v8hi_v8hi
7385    = build_function_type_list (V4SI_type_node,
7386				V8HI_type_node, V8HI_type_node, NULL_TREE);
7387  tree v8hi_ftype_v4si_v4si
7388    = build_function_type_list (V8HI_type_node,
7389				V4SI_type_node, V4SI_type_node, NULL_TREE);
7390  tree v16qi_ftype_v8hi_v8hi
7391    = build_function_type_list (V16QI_type_node,
7392				V8HI_type_node, V8HI_type_node, NULL_TREE);
7393  tree v4si_ftype_v16qi_v4si
7394    = build_function_type_list (V4SI_type_node,
7395				V16QI_type_node, V4SI_type_node, NULL_TREE);
7396  tree v4si_ftype_v16qi_v16qi
7397    = build_function_type_list (V4SI_type_node,
7398				V16QI_type_node, V16QI_type_node, NULL_TREE);
7399  tree v4si_ftype_v8hi_v4si
7400    = build_function_type_list (V4SI_type_node,
7401				V8HI_type_node, V4SI_type_node, NULL_TREE);
7402  tree v4si_ftype_v8hi
7403    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7404  tree int_ftype_v4si_v4si
7405    = build_function_type_list (integer_type_node,
7406				V4SI_type_node, V4SI_type_node, NULL_TREE);
7407  tree int_ftype_v4sf_v4sf
7408    = build_function_type_list (integer_type_node,
7409				V4SF_type_node, V4SF_type_node, NULL_TREE);
7410  tree int_ftype_v16qi_v16qi
7411    = build_function_type_list (integer_type_node,
7412				V16QI_type_node, V16QI_type_node, NULL_TREE);
7413  tree int_ftype_v8hi_v8hi
7414    = build_function_type_list (integer_type_node,
7415				V8HI_type_node, V8HI_type_node, NULL_TREE);
7416
7417  /* Add the simple ternary operators.  */
7418  d = (struct builtin_description *) bdesc_3arg;
7419  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7420    {
7421
7422      enum machine_mode mode0, mode1, mode2, mode3;
7423      tree type;
7424
7425      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7426	continue;
7427
7428      mode0 = insn_data[d->icode].operand[0].mode;
7429      mode1 = insn_data[d->icode].operand[1].mode;
7430      mode2 = insn_data[d->icode].operand[2].mode;
7431      mode3 = insn_data[d->icode].operand[3].mode;
7432
7433      /* When all four are of the same mode.  */
7434      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7435	{
7436	  switch (mode0)
7437	    {
7438	    case V4SImode:
7439	      type = v4si_ftype_v4si_v4si_v4si;
7440	      break;
7441	    case V4SFmode:
7442	      type = v4sf_ftype_v4sf_v4sf_v4sf;
7443	      break;
7444	    case V8HImode:
7445	      type = v8hi_ftype_v8hi_v8hi_v8hi;
7446	      break;
7447	    case V16QImode:
7448	      type = v16qi_ftype_v16qi_v16qi_v16qi;
7449	      break;
7450	    default:
7451	      abort();
7452	    }
7453	}
7454      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7455        {
7456	  switch (mode0)
7457	    {
7458	    case V4SImode:
7459	      type = v4si_ftype_v4si_v4si_v16qi;
7460	      break;
7461	    case V4SFmode:
7462	      type = v4sf_ftype_v4sf_v4sf_v16qi;
7463	      break;
7464	    case V8HImode:
7465	      type = v8hi_ftype_v8hi_v8hi_v16qi;
7466	      break;
7467	    case V16QImode:
7468	      type = v16qi_ftype_v16qi_v16qi_v16qi;
7469	      break;
7470	    default:
7471	      abort();
7472	    }
7473	}
7474      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7475	       && mode3 == V4SImode)
7476	type = v4si_ftype_v16qi_v16qi_v4si;
7477      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7478	       && mode3 == V4SImode)
7479	type = v4si_ftype_v8hi_v8hi_v4si;
7480      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7481	       && mode3 == V4SImode)
7482	type = v4sf_ftype_v4sf_v4sf_v4si;
7483
7484      /* vchar, vchar, vchar, 4 bit literal.  */
7485      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7486	       && mode3 == QImode)
7487	type = v16qi_ftype_v16qi_v16qi_int;
7488
7489      /* vshort, vshort, vshort, 4 bit literal.  */
7490      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7491	       && mode3 == QImode)
7492	type = v8hi_ftype_v8hi_v8hi_int;
7493
7494      /* vint, vint, vint, 4 bit literal.  */
7495      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7496	       && mode3 == QImode)
7497	type = v4si_ftype_v4si_v4si_int;
7498
7499      /* vfloat, vfloat, vfloat, 4 bit literal.  */
7500      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7501	       && mode3 == QImode)
7502	type = v4sf_ftype_v4sf_v4sf_int;
7503
7504      else
7505	abort ();
7506
7507      def_builtin (d->mask, d->name, type, d->code);
7508    }
7509
7510  /* Add the simple binary operators.  */
7511  d = (struct builtin_description *) bdesc_2arg;
7512  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7513    {
7514      enum machine_mode mode0, mode1, mode2;
7515      tree type;
7516
7517      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7518	continue;
7519
7520      mode0 = insn_data[d->icode].operand[0].mode;
7521      mode1 = insn_data[d->icode].operand[1].mode;
7522      mode2 = insn_data[d->icode].operand[2].mode;
7523
7524      /* When all three operands are of the same mode.  */
7525      if (mode0 == mode1 && mode1 == mode2)
7526	{
7527	  switch (mode0)
7528	    {
7529	    case V4SFmode:
7530	      type = v4sf_ftype_v4sf_v4sf;
7531	      break;
7532	    case V4SImode:
7533	      type = v4si_ftype_v4si_v4si;
7534	      break;
7535	    case V16QImode:
7536	      type = v16qi_ftype_v16qi_v16qi;
7537	      break;
7538	    case V8HImode:
7539	      type = v8hi_ftype_v8hi_v8hi;
7540	      break;
7541	    case V2SImode:
7542	      type = v2si_ftype_v2si_v2si;
7543	      break;
7544	    case V2SFmode:
7545	      type = v2sf_ftype_v2sf_v2sf;
7546	      break;
7547	    case SImode:
7548	      type = int_ftype_int_int;
7549	      break;
7550	    default:
7551	      abort ();
7552	    }
7553	}
7554
7555      /* A few other combos we really don't want to do manually.  */
7556
7557      /* vint, vfloat, vfloat.  */
7558      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7559	type = v4si_ftype_v4sf_v4sf;
7560
7561      /* vshort, vchar, vchar.  */
7562      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7563	type = v8hi_ftype_v16qi_v16qi;
7564
7565      /* vint, vshort, vshort.  */
7566      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7567	type = v4si_ftype_v8hi_v8hi;
7568
7569      /* vshort, vint, vint.  */
7570      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7571	type = v8hi_ftype_v4si_v4si;
7572
7573      /* vchar, vshort, vshort.  */
7574      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7575	type = v16qi_ftype_v8hi_v8hi;
7576
7577      /* vint, vchar, vint.  */
7578      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7579	type = v4si_ftype_v16qi_v4si;
7580
7581      /* vint, vchar, vchar.  */
7582      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7583	type = v4si_ftype_v16qi_v16qi;
7584
7585      /* vint, vshort, vint.  */
7586      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7587	type = v4si_ftype_v8hi_v4si;
7588
7589      /* vint, vint, 5 bit literal.  */
7590      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7591	type = v4si_ftype_v4si_int;
7592
7593      /* vshort, vshort, 5 bit literal.  */
7594      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7595	type = v8hi_ftype_v8hi_int;
7596
7597      /* vchar, vchar, 5 bit literal.  */
7598      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7599	type = v16qi_ftype_v16qi_int;
7600
7601      /* vfloat, vint, 5 bit literal.  */
7602      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7603	type = v4sf_ftype_v4si_int;
7604
7605      /* vint, vfloat, 5 bit literal.  */
7606      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7607	type = v4si_ftype_v4sf_int;
7608
7609      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7610	type = v2si_ftype_int_int;
7611
7612      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7613	type = v2si_ftype_v2si_char;
7614
7615      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7616	type = v2si_ftype_int_char;
7617
7618      /* int, x, x.  */
7619      else if (mode0 == SImode)
7620	{
7621	  switch (mode1)
7622	    {
7623	    case V4SImode:
7624	      type = int_ftype_v4si_v4si;
7625	      break;
7626	    case V4SFmode:
7627	      type = int_ftype_v4sf_v4sf;
7628	      break;
7629	    case V16QImode:
7630	      type = int_ftype_v16qi_v16qi;
7631	      break;
7632	    case V8HImode:
7633	      type = int_ftype_v8hi_v8hi;
7634	      break;
7635	    default:
7636	      abort ();
7637	    }
7638	}
7639
7640      else
7641	abort ();
7642
7643      def_builtin (d->mask, d->name, type, d->code);
7644    }
7645
7646  /* Add the simple unary operators.  */
7647  d = (struct builtin_description *) bdesc_1arg;
7648  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7649    {
7650      enum machine_mode mode0, mode1;
7651      tree type;
7652
7653      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7654	continue;
7655
7656      mode0 = insn_data[d->icode].operand[0].mode;
7657      mode1 = insn_data[d->icode].operand[1].mode;
7658
7659      if (mode0 == V4SImode && mode1 == QImode)
7660        type = v4si_ftype_int;
7661      else if (mode0 == V8HImode && mode1 == QImode)
7662        type = v8hi_ftype_int;
7663      else if (mode0 == V16QImode && mode1 == QImode)
7664        type = v16qi_ftype_int;
7665      else if (mode0 == V4SFmode && mode1 == V4SFmode)
7666	type = v4sf_ftype_v4sf;
7667      else if (mode0 == V8HImode && mode1 == V16QImode)
7668	type = v8hi_ftype_v16qi;
7669      else if (mode0 == V4SImode && mode1 == V8HImode)
7670	type = v4si_ftype_v8hi;
7671      else if (mode0 == V2SImode && mode1 == V2SImode)
7672	type = v2si_ftype_v2si;
7673      else if (mode0 == V2SFmode && mode1 == V2SFmode)
7674	type = v2sf_ftype_v2sf;
7675      else if (mode0 == V2SFmode && mode1 == V2SImode)
7676	type = v2sf_ftype_v2si;
7677      else if (mode0 == V2SImode && mode1 == V2SFmode)
7678	type = v2si_ftype_v2sf;
7679      else if (mode0 == V2SImode && mode1 == QImode)
7680	type = v2si_ftype_char;
7681      else
7682	abort ();
7683
7684      def_builtin (d->mask, d->name, type, d->code);
7685    }
7686}
7687
7688static void
7689rs6000_init_libfuncs (void)
7690{
7691  if (!TARGET_HARD_FLOAT)
7692    return;
7693
7694  if (DEFAULT_ABI != ABI_V4)
7695    {
7696      if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7697	{
7698	  /* AIX library routines for float->int conversion.  */
7699	  set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7700	  set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7701	  set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7702	  set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7703	}
7704
7705      /* AIX/Darwin/64-bit Linux quad floating point routines.  */
7706      if (!TARGET_XL_COMPAT)
7707	{
7708	  set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
7709	  set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
7710	  set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
7711	  set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
7712	}
7713      else
7714	{
7715	  set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7716	  set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7717	  set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7718	  set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7719	}
7720    }
7721  else
7722    {
7723      /* 32-bit SVR4 quad floating point routines.  */
7724
7725      set_optab_libfunc (add_optab, TFmode, "_q_add");
7726      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7727      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7728      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7729      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7730      if (TARGET_PPC_GPOPT || TARGET_POWER2)
7731	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7732
7733      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7734      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7735      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7736      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7737      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7738      set_optab_libfunc (le_optab, TFmode, "_q_fle");
7739
7740      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7741      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7742      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7743      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7744      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7745      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7746      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7747    }
7748}
7749
7750/* Expand a block move operation, and return 1 if successful.  Return 0
7751   if we should let the compiler generate normal code.
7752
7753   operands[0] is the destination
7754   operands[1] is the source
7755   operands[2] is the length
7756   operands[3] is the alignment */
7757
7758#define MAX_MOVE_REG 4
7759
int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;			/* bytes remaining to move */
  int offset;			/* current byte offset into src/dest */
  int move_bytes;		/* bytes handled by this iteration */
  /* Queued store insns.  Loads are emitted immediately but their
     matching stores are buffered here, so a group of up to
     MAX_MOVE_REG loads is emitted before the corresponding stores.  */
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      /* Each iteration picks either a string-move pattern (MODE stays
	 BLKmode and gen_func.movstrsi is used) or a single scalar move
	 (MODE is set to DI/SI/HI/QImode and gen_func.mov is used).  */
      union {
	rtx (*movstrsi) (rtx, rtx, rtx, rtx);
	rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* The string patterns burn a block of call-clobbered registers
	 (r5 upward), so they are only usable when those registers are
	 not fixed.  */
      if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movstrsi = gen_movstrsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movstrsi = gen_movstrsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movstrsi = gen_movstrsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movstrsi = gen_movstrsi_2reg;
	}
      else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movstrsi = gen_movstrsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
	{
	  rtx tmp_reg = gen_reg_rtx (mode);

	  /* Emit the load right away; queue the store (see STORES).  */
	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush queued stores when the buffer fills, when a string move
	 follows, or when this is the last chunk.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movstrsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  /* MOVE_BYTES & 31 encodes "32" as 0, matching the string
	     insn's length operand encoding.  */
	  emit_insn ((*gen_func.movstrsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
7922
7923
7924/* Return 1 if OP is a load multiple operation.  It is known to be a
7925   PARALLEL and the first section will be tested.  */
7926
7927int
7928load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7929{
7930  int count = XVECLEN (op, 0);
7931  unsigned int dest_regno;
7932  rtx src_addr;
7933  int i;
7934
7935  /* Perform a quick check so we don't blow up below.  */
7936  if (count <= 1
7937      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7938      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7939      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7940    return 0;
7941
7942  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7943  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7944
7945  for (i = 1; i < count; i++)
7946    {
7947      rtx elt = XVECEXP (op, 0, i);
7948
7949      if (GET_CODE (elt) != SET
7950	  || GET_CODE (SET_DEST (elt)) != REG
7951	  || GET_MODE (SET_DEST (elt)) != SImode
7952	  || REGNO (SET_DEST (elt)) != dest_regno + i
7953	  || GET_CODE (SET_SRC (elt)) != MEM
7954	  || GET_MODE (SET_SRC (elt)) != SImode
7955	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7956	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7957	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7958	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7959	return 0;
7960    }
7961
7962  return 1;
7963}
7964
7965/* Similar, but tests for store multiple.  Here, the second vector element
7966   is a CLOBBER.  It will be tested later.  */
7967
7968int
7969store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7970{
7971  int count = XVECLEN (op, 0) - 1;
7972  unsigned int src_regno;
7973  rtx dest_addr;
7974  int i;
7975
7976  /* Perform a quick check so we don't blow up below.  */
7977  if (count <= 1
7978      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7979      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7980      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7981    return 0;
7982
7983  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7984  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7985
7986  for (i = 1; i < count; i++)
7987    {
7988      rtx elt = XVECEXP (op, 0, i + 1);
7989
7990      if (GET_CODE (elt) != SET
7991	  || GET_CODE (SET_SRC (elt)) != REG
7992	  || GET_MODE (SET_SRC (elt)) != SImode
7993	  || REGNO (SET_SRC (elt)) != src_regno + i
7994	  || GET_CODE (SET_DEST (elt)) != MEM
7995	  || GET_MODE (SET_DEST (elt)) != SImode
7996	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7997	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7998	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7999	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8000	return 0;
8001    }
8002
8003  return 1;
8004}
8005
8006/* Return a string to perform a load_multiple operation.
8007   operands[0] is the vector.
8008   operands[1] is the source address.
8009   operands[2] is the first destination register.  */
8010
const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no load-multiple at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address register is the LAST destination: lswi the
	       first WORDS-1 words, then load the final word (which
	       overwrites the address) with a plain load.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address register is the FIRST destination: advance
	       the address by 4, lswi the remaining words starting at
	       the second register, then load word 0 last from offset
	       -4, clobbering the address only at the very end.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address register lands in the middle: fall back to
	       individual loads, doing word I (the one that clobbers
	       the address) last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* Common case: the address register is not among the destinations.  */
  return "{lsi|lswi} %2,%1,%N0";
}
8062
8063/* Return 1 for a parallel vrsave operation.  */
8064
8065int
8066vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8067{
8068  int count = XVECLEN (op, 0);
8069  unsigned int dest_regno, src_regno;
8070  int i;
8071
8072  if (count <= 1
8073      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8074      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8075      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8076    return 0;
8077
8078  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8079  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8080
8081  if (dest_regno != VRSAVE_REGNO
8082      && src_regno != VRSAVE_REGNO)
8083    return 0;
8084
8085  for (i = 1; i < count; i++)
8086    {
8087      rtx elt = XVECEXP (op, 0, i);
8088
8089      if (GET_CODE (elt) != CLOBBER
8090	  && GET_CODE (elt) != SET)
8091	return 0;
8092    }
8093
8094  return 1;
8095}
8096
8097/* Return 1 for an PARALLEL suitable for mfcr.  */
8098
8099int
8100mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8101{
8102  int count = XVECLEN (op, 0);
8103  int i;
8104
8105  /* Perform a quick check so we don't blow up below.  */
8106  if (count < 1
8107      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8108      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8109      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8110    return 0;
8111
8112  for (i = 0; i < count; i++)
8113    {
8114      rtx exp = XVECEXP (op, 0, i);
8115      rtx unspec;
8116      int maskval;
8117      rtx src_reg;
8118
8119      src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8120
8121      if (GET_CODE (src_reg) != REG
8122	  || GET_MODE (src_reg) != CCmode
8123	  || ! CR_REGNO_P (REGNO (src_reg)))
8124	return 0;
8125
8126      if (GET_CODE (exp) != SET
8127	  || GET_CODE (SET_DEST (exp)) != REG
8128	  || GET_MODE (SET_DEST (exp)) != SImode
8129	  || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8130	return 0;
8131      unspec = SET_SRC (exp);
8132      maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8133
8134      if (GET_CODE (unspec) != UNSPEC
8135	  || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8136	  || XVECLEN (unspec, 0) != 2
8137	  || XVECEXP (unspec, 0, 0) != src_reg
8138	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8139	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8140	return 0;
8141    }
8142  return 1;
8143}
8144
8145/* Return 1 for an PARALLEL suitable for mtcrf.  */
8146
8147int
8148mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8149{
8150  int count = XVECLEN (op, 0);
8151  int i;
8152  rtx src_reg;
8153
8154  /* Perform a quick check so we don't blow up below.  */
8155  if (count < 1
8156      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8157      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8158      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8159    return 0;
8160  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8161
8162  if (GET_CODE (src_reg) != REG
8163      || GET_MODE (src_reg) != SImode
8164      || ! INT_REGNO_P (REGNO (src_reg)))
8165    return 0;
8166
8167  for (i = 0; i < count; i++)
8168    {
8169      rtx exp = XVECEXP (op, 0, i);
8170      rtx unspec;
8171      int maskval;
8172
8173      if (GET_CODE (exp) != SET
8174	  || GET_CODE (SET_DEST (exp)) != REG
8175	  || GET_MODE (SET_DEST (exp)) != CCmode
8176	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8177	return 0;
8178      unspec = SET_SRC (exp);
8179      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8180
8181      if (GET_CODE (unspec) != UNSPEC
8182	  || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8183	  || XVECLEN (unspec, 0) != 2
8184	  || XVECEXP (unspec, 0, 0) != src_reg
8185	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8186	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8187	return 0;
8188    }
8189  return 1;
8190}
8191
8192/* Return 1 for an PARALLEL suitable for lmw.  */
8193
int
lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;	/* register holding the base address */
  HOST_WIDE_INT offset;		/* displacement of the first word */
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads every register from DEST_REGNO through 31, so the
     vector must have exactly 32 - DEST_REGNO elements.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* The first address may be a bare register (register 0 is
     disallowed as a base) or register + constant offset.  */
  if (legitimate_indirect_address_p (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load SImode register DEST_REGNO + I from address
     BASE_REGNO + OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
8267
8268/* Return 1 for an PARALLEL suitable for stmw.  */
8269
int
stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;	/* register holding the base address */
  HOST_WIDE_INT offset;		/* displacement of the first word */
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores every register from SRC_REGNO through 31, so the
     vector must have exactly 32 - SRC_REGNO elements.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* The first address may be a bare register (register 0 is
     disallowed as a base) or register + constant offset.  */
  if (legitimate_indirect_address_p (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must store SImode register SRC_REGNO + I at address
     BASE_REGNO + OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
8343
8344/* A validation routine: say whether CODE, a condition code, and MODE
8345   match.  The other alternatives either don't make sense or should
8346   never be generated.  */
8347
8348static void
8349validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8350{
8351  if (GET_RTX_CLASS (code) != '<'
8352      || GET_MODE_CLASS (mode) != MODE_CC)
8353    abort ();
8354
8355  /* These don't make sense.  */
8356  if ((code == GT || code == LT || code == GE || code == LE)
8357      && mode == CCUNSmode)
8358    abort ();
8359
8360  if ((code == GTU || code == LTU || code == GEU || code == LEU)
8361      && mode != CCUNSmode)
8362    abort ();
8363
8364  if (mode != CCFPmode
8365      && (code == ORDERED || code == UNORDERED
8366	  || code == UNEQ || code == LTGT
8367	  || code == UNGT || code == UNLT
8368	  || code == UNGE || code == UNLE))
8369    abort ();
8370
8371  /* These should never be generated except for
8372     flag_finite_math_only.  */
8373  if (mode == CCFPmode
8374      && ! flag_finite_math_only
8375      && (code == LE || code == GE
8376	  || code == UNEQ || code == LTGT
8377	  || code == UNGT || code == UNLT))
8378    abort ();
8379
8380  /* These are invalid; the information is not there.  */
8381  if (mode == CCEQmode
8382      && code != EQ && code != NE)
8383    abort ();
8384}
8385
8386/* Return 1 if OP is a comparison operation that is valid for a branch insn.
8387   We only check the opcode against the mode of the CC value here.  */
8388
8389int
8390branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8391{
8392  enum rtx_code code = GET_CODE (op);
8393  enum machine_mode cc_mode;
8394
8395  if (GET_RTX_CLASS (code) != '<')
8396    return 0;
8397
8398  cc_mode = GET_MODE (XEXP (op, 0));
8399  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8400    return 0;
8401
8402  validate_condition_mode (code, cc_mode);
8403
8404  return 1;
8405}
8406
8407/* Return 1 if OP is a comparison operation that is valid for a branch
8408   insn and which is true if the corresponding bit in the CC register
8409   is set.  */
8410
8411int
8412branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8413{
8414  enum rtx_code code;
8415
8416  if (! branch_comparison_operator (op, mode))
8417    return 0;
8418
8419  code = GET_CODE (op);
8420  return (code == EQ || code == LT || code == GT
8421	  || code == LTU || code == GTU
8422	  || code == UNORDERED);
8423}
8424
8425/* Return 1 if OP is a comparison operation that is valid for an scc
8426   insn: it must be a positive comparison.  */
8427
int
scc_comparison_operator (rtx op, enum machine_mode mode)
{
  /* scc patterns can only materialize conditions whose CR bit is set
     when true, so the branch predicate is exactly the right test.  */
  return branch_positive_comparison_operator (op, mode);
}
8433
8434int
8435trap_comparison_operator (rtx op, enum machine_mode mode)
8436{
8437  if (mode != VOIDmode && mode != GET_MODE (op))
8438    return 0;
8439  return GET_RTX_CLASS (GET_CODE (op)) == '<';
8440}
8441
8442int
8443boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8444{
8445  enum rtx_code code = GET_CODE (op);
8446  return (code == AND || code == IOR || code == XOR);
8447}
8448
8449int
8450boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8451{
8452  enum rtx_code code = GET_CODE (op);
8453  return (code == IOR || code == XOR);
8454}
8455
8456int
8457min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8458{
8459  enum rtx_code code = GET_CODE (op);
8460  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8461}
8462
8463/* Return 1 if ANDOP is a mask that has no bits on that are not in the
8464   mask required to convert the result of a rotate insn into a shift
8465   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
8466
8467int
8468includes_lshift_p (rtx shiftop, rtx andop)
8469{
8470  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8471
8472  shift_mask <<= INTVAL (shiftop);
8473
8474  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8475}
8476
8477/* Similar, but for right shift.  */
8478
8479int
8480includes_rshift_p (rtx shiftop, rtx andop)
8481{
8482  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8483
8484  shift_mask >>= INTVAL (shiftop);
8485
8486  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8487}
8488
8489/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8490   to perform a left shift.  It must have exactly SHIFTOP least
8491   significant 0's, then one or more 1's, then zero or more 0's.  */
8492
int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* An all-zero or all-one mask has no 0->1 transition to anchor
	 the rldic shape on; reject outright.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same check, but the 64-bit constant is split into LOW/HIGH
	 halves when the host word is only 32 bits wide.  On 64-bit
	 hosts HIGH is never read (every use below is guarded by
	 HOST_BITS_PER_WIDE_INT < 64, a compile-time constant).  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zeros and all-ones, as in the CONST_INT case.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high half; redo the
	     CONST_INT-style check there with the shift amount reduced
	     by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Mask starts in the low half: its LSB must line up with the
	 shift mask's LSB ...  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* ... and after inverting and stripping the low run of ones,
	 what remains (possibly spilling into the high half) must be a
	 single run of ones up to the top bit.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
8581
8582/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8583   to perform a left shift.  It must have SHIFTOP or more least
8584   significant 0's, with the remainder of the word 1's.  */
8585
int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      /* Mask of the bits that survive a left shift by SHIFTOP.  */
      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  /* The 64-bit value is split across two host words; analyze
	     the high word when the low one is all zeros.  */
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* LOW is nonzero, so the high word must be all ones for the
	     mask to have the required 1...10...0 shape.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
8651
8652/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8653   for lfq and stfq insns.
8654
8655   Note reg1 and reg2 *must* be hard registers.  To be sure we will
8656   abort if we are passed pseudo registers.  */
8657
8658int
8659registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8660{
8661  /* We might have been passed a SUBREG.  */
8662  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8663    return 0;
8664
8665  return (REGNO (reg1) == REGNO (reg2) - 1);
8666}
8667
8668/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8669   addr1 and addr2 must be in consecutive memory locations
8670   (addr2 == addr1 + 8).  */
8671
8672int
8673addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8674{
8675  unsigned int reg1;
8676  int offset1;
8677
8678  /* Extract an offset (if used) from the first addr.  */
8679  if (GET_CODE (addr1) == PLUS)
8680    {
8681      /* If not a REG, return zero.  */
8682      if (GET_CODE (XEXP (addr1, 0)) != REG)
8683	return 0;
8684      else
8685	{
8686          reg1 = REGNO (XEXP (addr1, 0));
8687	  /* The offset must be constant!  */
8688	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8689            return 0;
8690          offset1 = INTVAL (XEXP (addr1, 1));
8691	}
8692    }
8693  else if (GET_CODE (addr1) != REG)
8694    return 0;
8695  else
8696    {
8697      reg1 = REGNO (addr1);
8698      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
8699      offset1 = 0;
8700    }
8701
8702  /* Make sure the second address is a (mem (plus (reg) (const_int)))
8703      or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8704      register as addr1.  */
8705  if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8706   return 1;
8707  if (GET_CODE (addr2) != PLUS)
8708    return 0;
8709
8710  if (GET_CODE (XEXP (addr2, 0)) != REG
8711      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8712    return 0;
8713
8714  if (reg1 != REGNO (XEXP (addr2, 0)))
8715    return 0;
8716
8717  /* The offset for the second addr must be 8 more than the first addr.  */
8718  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8719    return 0;
8720
8721  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
8722     instructions.  */
8723  return 1;
8724}
8725
8726/* Return the register class of a scratch register needed to copy IN into
8727   or out of a register in CLASS in MODE.  If it can be done directly,
8728   NO_REGS is returned.  INP is nonzero if we are loading the reg, zero
8729   for storing.  */
8730
enum reg_class
secondary_reload_class (enum reg_class class,
			enum machine_mode mode,
			rtx in,
			int inp)
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
                    && MACHOPIC_INDIRECT
#endif
                    ))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* A 64-bit gpr load or store using an offset that isn't a multiple of
     four needs a secondary reload.  */
  if (TARGET_POWERPC64
      && GET_MODE_UNIT_SIZE (mode) >= 8
      && (!inp || class != BASE_REGS)
      && invalid_gpr_mem (in, mode))
    return BASE_REGS;

  /* Reduce IN to a hard register number, or -1 if it has none.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
8811
8812/* Given a comparison operation, return the bit number in CCR to test.  We
8813   know this is a valid comparison.
8814
8815   SCC_P is 1 if this is for an scc.  That means that %D will have been
8816   used instead of %C, so the bits will be in different places.
8817
8818   Return -1 if OP isn't a valid comparison for some reason.  */
8819
8820int
8821ccr_bit (rtx op, int scc_p)
8822{
8823  enum rtx_code code = GET_CODE (op);
8824  enum machine_mode cc_mode;
8825  int cc_regnum;
8826  int base_bit;
8827  rtx reg;
8828
8829  if (GET_RTX_CLASS (code) != '<')
8830    return -1;
8831
8832  reg = XEXP (op, 0);
8833
8834  if (GET_CODE (reg) != REG
8835      || ! CR_REGNO_P (REGNO (reg)))
8836    abort ();
8837
8838  cc_mode = GET_MODE (reg);
8839  cc_regnum = REGNO (reg);
8840  base_bit = 4 * (cc_regnum - CR0_REGNO);
8841
8842  validate_condition_mode (code, cc_mode);
8843
8844  /* When generating a sCOND operation, only positive conditions are
8845     allowed.  */
8846  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8847      && code != GTU && code != LTU)
8848    abort ();
8849
8850  switch (code)
8851    {
8852    case NE:
8853      return scc_p ? base_bit + 3 : base_bit + 2;
8854    case EQ:
8855      return base_bit + 2;
8856    case GT:  case GTU:  case UNLE:
8857      return base_bit + 1;
8858    case LT:  case LTU:  case UNGE:
8859      return base_bit;
8860    case ORDERED:  case UNORDERED:
8861      return base_bit + 3;
8862
8863    case GE:  case GEU:
8864      /* If scc, we will have done a cror to put the bit in the
8865	 unordered position.  So test that bit.  For integer, this is ! LT
8866	 unless this is an scc insn.  */
8867      return scc_p ? base_bit + 3 : base_bit;
8868
8869    case LE:  case LEU:
8870      return scc_p ? base_bit + 3 : base_bit + 1;
8871
8872    default:
8873      abort ();
8874    }
8875}
8876
8877/* Return the GOT register.  */
8878
8879struct rtx_def *
8880rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8881{
8882  /* The second flow pass currently (June 1999) can't update
8883     regs_ever_live without disturbing other parts of the compiler, so
8884     update it here to make the prolog/epilogue code happy.  */
8885  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8886    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8887
8888  current_function_uses_pic_offset_table = 1;
8889
8890  return pic_offset_table_rtx;
8891}
8892
8893/* Function to init struct machine_function.
8894   This will be called, via a pointer variable,
8895   from push_function_context.  */
8896
8897static struct machine_function *
8898rs6000_init_machine_status (void)
8899{
8900  return ggc_alloc_cleared (sizeof (machine_function));
8901}
8902
/* These macros test for integers and extract the low-order bits.  */

/* True iff X is a VOIDmode integer constant (CONST_INT, or a
   CONST_DOUBLE holding an integer rather than a float).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

/* Low-order host word of such a constant.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8910
/* Compute the MB (mask begin) field of an rlwinm-style mask in OP:
   the index, counting from the most significant bit, of the first
   1 bit of the (possibly wrap-around) 32-bit mask.  */
int
extract_MB (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* An all-zeros mask has no MB; mask_operand should have
	 rejected it before we get here.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
8943
/* Compute the ME (mask end) field of an rlwinm-style mask in OP:
   the index, counting from the most significant bit, of the last
   1 bit of the (possibly wrap-around) 32-bit mask.  */
int
extract_ME (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* An all-zeros mask has no ME; mask_operand should have
	 rejected it before we get here.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
8977
8978/* Locate some local-dynamic symbol still in use by this function
8979   so that we can print its name in some tls_ld pattern.  */
8980
8981static const char *
8982rs6000_get_some_local_dynamic_name (void)
8983{
8984  rtx insn;
8985
8986  if (cfun->machine->some_ld_name)
8987    return cfun->machine->some_ld_name;
8988
8989  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8990    if (INSN_P (insn)
8991	&& for_each_rtx (&PATTERN (insn),
8992			 rs6000_get_some_local_dynamic_name_1, 0))
8993      return cfun->machine->some_ld_name;
8994
8995  abort ();
8996}
8997
8998/* Helper function for rs6000_get_some_local_dynamic_name.  */
8999
9000static int
9001rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9002{
9003  rtx x = *px;
9004
9005  if (GET_CODE (x) == SYMBOL_REF)
9006    {
9007      const char *str = XSTR (x, 0);
9008      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9009	{
9010	  cfun->machine->some_ld_name = str;
9011	  return 1;
9012	}
9013    }
9014
9015  return 0;
9016}
9017
9018/* Print an operand.  Recognize special options, documented below.  */
9019
/* Relocation name and base register used when printing small-data
   operands; on ELF they depend on the rs6000_sdata model.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
9027
9028void
9029print_operand (FILE *file, rtx x, int code)
9030{
9031  int i;
9032  HOST_WIDE_INT val;
9033  unsigned HOST_WIDE_INT uval;
9034
9035  switch (code)
9036    {
9037    case '.':
9038      /* Write out an instruction after the call which may be replaced
9039	 with glue code by the loader.  This depends on the AIX version.  */
9040      asm_fprintf (file, RS6000_CALL_GLUE);
9041      return;
9042
9043      /* %a is output_address.  */
9044
9045    case 'A':
9046      /* If X is a constant integer whose low-order 5 bits are zero,
9047	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
9048	 in the AIX assembler where "sri" with a zero shift count
9049	 writes a trash instruction.  */
9050      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9051	putc ('l', file);
9052      else
9053	putc ('r', file);
9054      return;
9055
9056    case 'b':
9057      /* If constant, low-order 16 bits of constant, unsigned.
9058	 Otherwise, write normally.  */
9059      if (INT_P (x))
9060	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9061      else
9062	print_operand (file, x, 0);
9063      return;
9064
9065    case 'B':
9066      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9067	 for 64-bit mask direction.  */
9068      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9069      return;
9070
9071      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9072	 output_operand.  */
9073
9074    case 'c':
9075      /* X is a CR register.  Print the number of the GT bit of the CR.  */
9076      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9077       output_operand_lossage ("invalid %%E value");
9078      else
9079       fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9080      return;
9081
9082    case 'D':
9083      /* Like 'J' but get to the EQ bit.  */
9084      if (GET_CODE (x) != REG)
9085       abort ();
9086
9087      /* Bit 1 is EQ bit.  */
9088      i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9089
9090      /* If we want bit 31, write a shift count of zero, not 32.  */
9091      fprintf (file, "%d", i == 31 ? 0 : i + 1);
9092      return;
9093
9094    case 'E':
9095      /* X is a CR register.  Print the number of the EQ bit of the CR */
9096      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9097	output_operand_lossage ("invalid %%E value");
9098      else
9099	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9100      return;
9101
9102    case 'f':
9103      /* X is a CR register.  Print the shift count needed to move it
9104	 to the high-order four bits.  */
9105      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9106	output_operand_lossage ("invalid %%f value");
9107      else
9108	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9109      return;
9110
9111    case 'F':
9112      /* Similar, but print the count for the rotate in the opposite
9113	 direction.  */
9114      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9115	output_operand_lossage ("invalid %%F value");
9116      else
9117	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9118      return;
9119
9120    case 'G':
9121      /* X is a constant integer.  If it is negative, print "m",
9122	 otherwise print "z".  This is to make an aze or ame insn.  */
9123      if (GET_CODE (x) != CONST_INT)
9124	output_operand_lossage ("invalid %%G value");
9125      else if (INTVAL (x) >= 0)
9126	putc ('z', file);
9127      else
9128	putc ('m', file);
9129      return;
9130
9131    case 'h':
9132      /* If constant, output low-order five bits.  Otherwise, write
9133	 normally.  */
9134      if (INT_P (x))
9135	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9136      else
9137	print_operand (file, x, 0);
9138      return;
9139
9140    case 'H':
9141      /* If constant, output low-order six bits.  Otherwise, write
9142	 normally.  */
9143      if (INT_P (x))
9144	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9145      else
9146	print_operand (file, x, 0);
9147      return;
9148
9149    case 'I':
9150      /* Print `i' if this is a constant, else nothing.  */
9151      if (INT_P (x))
9152	putc ('i', file);
9153      return;
9154
9155    case 'j':
9156      /* Write the bit number in CCR for jump.  */
9157      i = ccr_bit (x, 0);
9158      if (i == -1)
9159	output_operand_lossage ("invalid %%j code");
9160      else
9161	fprintf (file, "%d", i);
9162      return;
9163
9164    case 'J':
9165      /* Similar, but add one for shift count in rlinm for scc and pass
9166	 scc flag to `ccr_bit'.  */
9167      i = ccr_bit (x, 1);
9168      if (i == -1)
9169	output_operand_lossage ("invalid %%J code");
9170      else
9171	/* If we want bit 31, write a shift count of zero, not 32.  */
9172	fprintf (file, "%d", i == 31 ? 0 : i + 1);
9173      return;
9174
9175    case 'k':
9176      /* X must be a constant.  Write the 1's complement of the
9177	 constant.  */
9178      if (! INT_P (x))
9179	output_operand_lossage ("invalid %%k value");
9180      else
9181	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9182      return;
9183
9184    case 'K':
9185      /* X must be a symbolic constant on ELF.  Write an
9186	 expression suitable for an 'addi' that adds in the low 16
9187	 bits of the MEM.  */
9188      if (GET_CODE (x) != CONST)
9189	{
9190	  print_operand_address (file, x);
9191	  fputs ("@l", file);
9192	}
9193      else
9194	{
9195	  if (GET_CODE (XEXP (x, 0)) != PLUS
9196	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9197		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9198	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9199	    output_operand_lossage ("invalid %%K value");
9200	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
9201	  fputs ("@l", file);
9202	  /* For GNU as, there must be a non-alphanumeric character
9203	     between 'l' and the number.  The '-' is added by
9204	     print_operand() already.  */
9205	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9206	    fputs ("+", file);
9207	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9208	}
9209      return;
9210
9211      /* %l is output_asm_label.  */
9212
9213    case 'L':
9214      /* Write second word of DImode or DFmode reference.  Works on register
9215	 or non-indexed memory only.  */
9216      if (GET_CODE (x) == REG)
9217	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9218      else if (GET_CODE (x) == MEM)
9219	{
9220	  /* Handle possible auto-increment.  Since it is pre-increment and
9221	     we have already done it, we can just use an offset of word.  */
9222	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9223	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9224	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9225					   UNITS_PER_WORD));
9226	  else
9227	    output_address (XEXP (adjust_address_nv (x, SImode,
9228						     UNITS_PER_WORD),
9229				  0));
9230
9231	  if (small_data_operand (x, GET_MODE (x)))
9232	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9233		     reg_names[SMALL_DATA_REG]);
9234	}
9235      return;
9236
9237    case 'm':
9238      /* MB value for a mask operand.  */
9239      if (! mask_operand (x, SImode))
9240	output_operand_lossage ("invalid %%m value");
9241
9242      fprintf (file, "%d", extract_MB (x));
9243      return;
9244
9245    case 'M':
9246      /* ME value for a mask operand.  */
9247      if (! mask_operand (x, SImode))
9248	output_operand_lossage ("invalid %%M value");
9249
9250      fprintf (file, "%d", extract_ME (x));
9251      return;
9252
9253      /* %n outputs the negative of its operand.  */
9254
9255    case 'N':
9256      /* Write the number of elements in the vector times 4.  */
9257      if (GET_CODE (x) != PARALLEL)
9258	output_operand_lossage ("invalid %%N value");
9259      else
9260	fprintf (file, "%d", XVECLEN (x, 0) * 4);
9261      return;
9262
9263    case 'O':
9264      /* Similar, but subtract 1 first.  */
9265      if (GET_CODE (x) != PARALLEL)
9266	output_operand_lossage ("invalid %%O value");
9267      else
9268	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9269      return;
9270
9271    case 'p':
9272      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
9273      if (! INT_P (x)
9274	  || INT_LOWPART (x) < 0
9275	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
9276	output_operand_lossage ("invalid %%p value");
9277      else
9278	fprintf (file, "%d", i);
9279      return;
9280
9281    case 'P':
9282      /* The operand must be an indirect memory reference.  The result
9283	 is the register name.  */
9284      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9285	  || REGNO (XEXP (x, 0)) >= 32)
9286	output_operand_lossage ("invalid %%P value");
9287      else
9288	fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9289      return;
9290
9291    case 'q':
9292      /* This outputs the logical code corresponding to a boolean
9293	 expression.  The expression may have one or both operands
9294	 negated (if one, only the first one).  For condition register
9295         logical operations, it will also treat the negated
9296         CR codes as NOTs, but not handle NOTs of them.  */
9297      {
9298	const char *const *t = 0;
9299	const char *s;
9300	enum rtx_code code = GET_CODE (x);
9301	static const char * const tbl[3][3] = {
9302	  { "and", "andc", "nor" },
9303	  { "or", "orc", "nand" },
9304	  { "xor", "eqv", "xor" } };
9305
9306	if (code == AND)
9307	  t = tbl[0];
9308	else if (code == IOR)
9309	  t = tbl[1];
9310	else if (code == XOR)
9311	  t = tbl[2];
9312	else
9313	  output_operand_lossage ("invalid %%q value");
9314
9315	if (GET_CODE (XEXP (x, 0)) != NOT)
9316	  s = t[0];
9317	else
9318	  {
9319	    if (GET_CODE (XEXP (x, 1)) == NOT)
9320	      s = t[2];
9321	    else
9322	      s = t[1];
9323	  }
9324
9325	fputs (s, file);
9326      }
9327      return;
9328
9329    case 'Q':
9330      if (TARGET_MFCRF)
9331	fputc (',',file);
9332        /* FALLTHRU */
9333      else
9334	return;
9335
9336    case 'R':
9337      /* X is a CR register.  Print the mask for `mtcrf'.  */
9338      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9339	output_operand_lossage ("invalid %%R value");
9340      else
9341	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9342      return;
9343
9344    case 's':
9345      /* Low 5 bits of 32 - value */
9346      if (! INT_P (x))
9347	output_operand_lossage ("invalid %%s value");
9348      else
9349	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9350      return;
9351
9352    case 'S':
9353      /* PowerPC64 mask position.  All 0's is excluded.
9354	 CONST_INT 32-bit mask is considered sign-extended so any
9355	 transition must occur within the CONST_INT, not on the boundary.  */
9356      if (! mask64_operand (x, DImode))
9357	output_operand_lossage ("invalid %%S value");
9358
9359      uval = INT_LOWPART (x);
9360
9361      if (uval & 1)	/* Clear Left */
9362	{
9363#if HOST_BITS_PER_WIDE_INT > 64
9364	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9365#endif
9366	  i = 64;
9367	}
9368      else		/* Clear Right */
9369	{
9370	  uval = ~uval;
9371#if HOST_BITS_PER_WIDE_INT > 64
9372	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9373#endif
9374	  i = 63;
9375	}
9376      while (uval != 0)
9377	--i, uval >>= 1;
9378      if (i < 0)
9379	abort ();
9380      fprintf (file, "%d", i);
9381      return;
9382
9383    case 't':
9384      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
9385      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9386	abort ();
9387
9388      /* Bit 3 is OV bit.  */
9389      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9390
9391      /* If we want bit 31, write a shift count of zero, not 32.  */
9392      fprintf (file, "%d", i == 31 ? 0 : i + 1);
9393      return;
9394
9395    case 'T':
9396      /* Print the symbolic name of a branch target register.  */
9397      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9398				  && REGNO (x) != COUNT_REGISTER_REGNUM))
9399	output_operand_lossage ("invalid %%T value");
9400      else if (REGNO (x) == LINK_REGISTER_REGNUM)
9401	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9402      else
9403	fputs ("ctr", file);
9404      return;
9405
9406    case 'u':
9407      /* High-order 16 bits of constant for use in unsigned operand.  */
9408      if (! INT_P (x))
9409	output_operand_lossage ("invalid %%u value");
9410      else
9411	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9412		 (INT_LOWPART (x) >> 16) & 0xffff);
9413      return;
9414
9415    case 'v':
9416      /* High-order 16 bits of constant for use in signed operand.  */
9417      if (! INT_P (x))
9418	output_operand_lossage ("invalid %%v value");
9419      else
9420	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9421		 (INT_LOWPART (x) >> 16) & 0xffff);
9422      return;
9423
9424    case 'U':
9425      /* Print `u' if this has an auto-increment or auto-decrement.  */
9426      if (GET_CODE (x) == MEM
9427	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
9428	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9429	putc ('u', file);
9430      return;
9431
9432    case 'V':
9433      /* Print the trap code for this operand.  */
9434      switch (GET_CODE (x))
9435	{
9436	case EQ:
9437	  fputs ("eq", file);   /* 4 */
9438	  break;
9439	case NE:
9440	  fputs ("ne", file);   /* 24 */
9441	  break;
9442	case LT:
9443	  fputs ("lt", file);   /* 16 */
9444	  break;
9445	case LE:
9446	  fputs ("le", file);   /* 20 */
9447	  break;
9448	case GT:
9449	  fputs ("gt", file);   /* 8 */
9450	  break;
9451	case GE:
9452	  fputs ("ge", file);   /* 12 */
9453	  break;
9454	case LTU:
9455	  fputs ("llt", file);  /* 2 */
9456	  break;
9457	case LEU:
9458	  fputs ("lle", file);  /* 6 */
9459	  break;
9460	case GTU:
9461	  fputs ("lgt", file);  /* 1 */
9462	  break;
9463	case GEU:
9464	  fputs ("lge", file);  /* 5 */
9465	  break;
9466	default:
9467	  abort ();
9468	}
9469      break;
9470
9471    case 'w':
9472      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
9473	 normally.  */
9474      if (INT_P (x))
9475	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9476		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9477      else
9478	print_operand (file, x, 0);
9479      return;
9480
9481    case 'W':
9482      /* MB value for a PowerPC64 rldic operand.  */
9483      val = (GET_CODE (x) == CONST_INT
9484	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9485
9486      if (val < 0)
9487	i = -1;
9488      else
9489	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9490	  if ((val <<= 1) < 0)
9491	    break;
9492
9493#if HOST_BITS_PER_WIDE_INT == 32
9494      if (GET_CODE (x) == CONST_INT && i >= 0)
9495	i += 32;  /* zero-extend high-part was all 0's */
9496      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9497	{
9498	  val = CONST_DOUBLE_LOW (x);
9499
9500	  if (val == 0)
9501	    abort ();
9502	  else if (val < 0)
9503	    --i;
9504	  else
9505	    for ( ; i < 64; i++)
9506	      if ((val <<= 1) < 0)
9507		break;
9508	}
9509#endif
9510
9511      fprintf (file, "%d", i + 1);
9512      return;
9513
9514    case 'X':
9515      if (GET_CODE (x) == MEM
9516	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
9517	putc ('x', file);
9518      return;
9519
9520    case 'Y':
9521      /* Like 'L', for third word of TImode  */
9522      if (GET_CODE (x) == REG)
9523	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9524      else if (GET_CODE (x) == MEM)
9525	{
9526	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9527	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9528	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9529	  else
9530	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9531	  if (small_data_operand (x, GET_MODE (x)))
9532	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9533		     reg_names[SMALL_DATA_REG]);
9534	}
9535      return;
9536
9537    case 'z':
9538      /* X is a SYMBOL_REF.  Write out the name preceded by a
9539	 period and without any trailing data in brackets.  Used for function
9540	 names.  If we are configured for System V (or the embedded ABI) on
9541	 the PowerPC, do not emit the period, since those systems do not use
9542	 TOCs and the like.  */
9543      if (GET_CODE (x) != SYMBOL_REF)
9544	abort ();
9545
9546      if (XSTR (x, 0)[0] != '.')
9547	{
9548	  switch (DEFAULT_ABI)
9549	    {
9550	    default:
9551	      abort ();
9552
9553	    case ABI_AIX:
9554	      putc ('.', file);
9555	      break;
9556
9557	    case ABI_V4:
9558	    case ABI_DARWIN:
9559	      break;
9560	    }
9561	}
9562      if (TARGET_AIX)
9563	RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9564      else
9565	assemble_name (file, XSTR (x, 0));
9566      return;
9567
9568    case 'Z':
9569      /* Like 'L', for last word of TImode.  */
9570      if (GET_CODE (x) == REG)
9571	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9572      else if (GET_CODE (x) == MEM)
9573	{
9574	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9575	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9576	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9577	  else
9578	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9579	  if (small_data_operand (x, GET_MODE (x)))
9580	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9581		     reg_names[SMALL_DATA_REG]);
9582	}
9583      return;
9584
9585      /* Print AltiVec or SPE memory operand.  */
9586    case 'y':
9587      {
9588	rtx tmp;
9589
9590	if (GET_CODE (x) != MEM)
9591	  abort ();
9592
9593	tmp = XEXP (x, 0);
9594
9595	if (TARGET_E500)
9596	  {
9597	    /* Handle [reg].  */
9598	    if (GET_CODE (tmp) == REG)
9599	      {
9600		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9601		break;
9602	      }
9603	    /* Handle [reg+UIMM].  */
9604	    else if (GET_CODE (tmp) == PLUS &&
9605		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9606	      {
9607		int x;
9608
9609		if (GET_CODE (XEXP (tmp, 0)) != REG)
9610		  abort ();
9611
9612		x = INTVAL (XEXP (tmp, 1));
9613		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9614		break;
9615	      }
9616
9617	    /* Fall through.  Must be [reg+reg].  */
9618	  }
9619	if (GET_CODE (tmp) == REG)
9620	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9621	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9622	  {
9623	    if (REGNO (XEXP (tmp, 0)) == 0)
9624	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9625		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
9626	    else
9627	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9628		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
9629	  }
9630	else
9631	  abort ();
9632	break;
9633      }
9634
9635    case 0:
9636      if (GET_CODE (x) == REG)
9637	fprintf (file, "%s", reg_names[REGNO (x)]);
9638      else if (GET_CODE (x) == MEM)
9639	{
9640	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
9641	     know the width from the mode.  */
9642	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9643	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9644		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9645	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9646	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9647		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9648	  else
9649	    output_address (XEXP (x, 0));
9650	}
9651      else
9652	output_addr_const (file, x);
9653      return;
9654
9655    case '&':
9656      assemble_name (file, rs6000_get_some_local_dynamic_name ());
9657      return;
9658
9659    default:
9660      output_operand_lossage ("invalid %%xn code");
9661    }
9662}
9663
/* Print to FILE the assembly form of the address X.  X may be a bare
   register, a symbolic reference (possibly small-data or, invalidly,
   bare under TOC addressing), a reg+reg or reg+const sum, an
   ELF/Darwin LO_SUM, or a TOC-relative constant-pool address.
   Aborts on any address form that should never reach final output.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    /* A bare base register: explicit zero displacement.  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data objects are addressed relative to the
	   small-data base register.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* A bare symbolic address is not legitimate when a TOC is
	   in use.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* reg+reg indexed form.  Register 0 reads as literal zero in
	 the base position, so if it appears as operand 0 emit the
	 registers swapped to keep it out of the base slot.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    /* reg+const displacement form.  */
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Low 16 bits of a symbol, ELF syntax: sym@l(reg).  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Same thing in Darwin syntax: lo16(sym)(reg).  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  The shared RTL
	     is restored before returning.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* For ELF, also splice "@toc" onto the symbol name while
		 it is printed; the original name pointer is put back
		 just below, so the alloca'd copy never escapes.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the MINUS removal done above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
9749
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true when the value was emitted here, false to
   fall back to the default hook.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      /* Re-entrancy guard: output_addr_const below can wind up back
	 in this hook; the recursive call must take the default path.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label before the word, the word itself
	     tagged @fixup, then record the label's address in the
	     .fixup section so startup code can relocate the word.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
9810
9811#ifdef HAVE_GAS_HIDDEN
9812/* Emit an assembler directive to set symbol visibility for DECL to
9813   VISIBILITY_TYPE.  */
9814
9815static void
9816rs6000_assemble_visibility (tree decl, int vis)
9817{
9818  /* Functions need to have their entry point symbol visibility set as
9819     well as their descriptor symbol visibility.  */
9820  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9821    {
9822      static const char * const visibility_types[] = {
9823        NULL, "internal", "hidden", "protected"
9824      };
9825
9826      const char *name, *type;
9827
9828      name = ((* targetm.strip_name_encoding)
9829	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9830      type = visibility_types[vis];
9831
9832      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9833      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9834    }
9835  else
9836    default_assemble_visibility (decl, vis);
9837}
9838#endif
9839
9840enum rtx_code
9841rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9842{
9843  /* Reversal of FP compares takes care -- an ordered compare
9844     becomes an unordered compare and vice versa.  */
9845  if (mode == CCFPmode
9846      && (!flag_finite_math_only
9847	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9848	  || code == UNEQ || code == LTGT))
9849    return reverse_condition_maybe_unordered (code);
9850  else
9851    return reverse_condition (code);
9852}
9853
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   the globals rs6000_compare_op0/op1; rs6000_compare_fp_p says
   whether this is a floating-point compare.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the condition-register mode: CCFPmode for FP compares,
     CCUNSmode for unsigned integer compares, CCmode otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	  || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* Note: The E500 comparison instructions set the GT bit (x +
        1), on success.  This explains the mess.  */

      /* Under -ffinite-math-only the tst* variants (which assume no
	 NaNs) suffice; otherwise use the full cmp* forms.  */
      switch (code)
	{
       case EQ: case UNEQ: case NE: case LTGT:
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
       case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
       case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Emit the LT/GT half chosen above...  */
	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  /* NOTE(review): the next two gen_reg_rtx calls are dead
	     stores -- or1 and or2 are unconditionally overwritten
	     with GT rtxes a few lines below before any use.  */
	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* Both halves signal success via their GT bits.  */
	  or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
	  or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* Everything else reduces to testing the GT bit: NE/LTGT
	     select its complement, all other codes test it directly
	     (the instruction chosen above already encodes which
	     relation sets GT).  */
	  if (code == NE || code == LTGT)
	    code = NE;
         else
           code = EQ;
	}

      /* Emit the last (or only) compare/OR built above.  */
      emit_insn (cmp);
    }
  else
    {
      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
         CLOBBERs to match cmptf_internal2 pattern.  */
      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
          && GET_MODE (rs6000_compare_op0) == TFmode
          && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
          && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
        emit_insn (gen_rtx_PARALLEL (VOIDmode,
          gen_rtvec (9,
		     gen_rtx_SET (VOIDmode,
				  compare_result,
				  gen_rtx_COMPARE (comp_mode,
						   rs6000_compare_op0,
						   rs6000_compare_op1)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
      else
	/* The ordinary case: one compare into the CC register.  */
	emit_insn (gen_rtx_SET (VOIDmode, compare_result,
				gen_rtx_COMPARE (comp_mode,
						 rs6000_compare_op0,
						 rs6000_compare_op1)));
    }

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_finite_math_only
      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose the two-bit condition into two single-bit tests
	 that will be ORed (via a cror in the resulting pattern).  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  /* Return "(code cc-reg 0)" for the caller to wrap in a branch or
     store-condition.  */
  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
10028
10029
/* Emit the RTL for an sCOND pattern: store the 0/1 truth value of
   comparison CODE on rs6000_compare_op0/op1 into RESULT.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      /* E500 GPR-based FP compares reduce to an EQ/NE test of one CR
	 bit; flip it for NE, then copy the CR EQ bit into RESULT.  */
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      if (cond_code != NE && cond_code != EQ)
       abort ();

      if (cond_code == NE)
       emit_insn (gen_e500_flip_eq_bit (t, t));

      emit_insn (gen_move_from_CR_eq_bit (result, t));
      return;
    }

  /* These codes cannot be stored directly; compute the reversed
     condition into a fresh CCEQ register and test that for EQ.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
			      SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  /* op0 has VOIDmode when it is a constant; use the mode of the
     other operand in that case.  */
  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      /* 64-bit compare: produce a DImode value, converting to the
	 mode of RESULT as needed.  */
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
10093
10094/* Emit a branch of kind CODE to location LOC.  */
10095
10096void
10097rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10098{
10099  rtx condition_rtx, loc_ref;
10100
10101  condition_rtx = rs6000_generate_compare (code);
10102  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10103  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10104			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10105						     loc_ref, pc_rtx)));
10106}
10107
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned pointer refers to a static buffer, overwritten on
   each call.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* An insn length of 8 means the target is out of conditional-branch
     range, so a reversed branch around an unconditional one is
     emitted instead.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = GT;
      else if (code == NE)
       code = UNLE;
      else
	abort ();
    }

  /* Map the condition code to its branch mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (rs6000_always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
              pred = "+";
	  else
	    pred = "-";
	}
    }

  /* The {old|new} braces select between POWER and PowerPC mnemonics
     in the final assembler output.  NULL label means a conditional
     return (blr form).  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
10236
10237/* Return the string to flip the EQ bit on a CR.  */
10238char *
10239output_e500_flip_eq_bit (rtx dst, rtx src)
10240{
10241  static char string[64];
10242  int a, b;
10243
10244  if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10245      || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10246    abort ();
10247
10248  /* EQ bit.  */
10249  a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
10250  b = 4 * (REGNO (src) - CR0_REGNO) + 2;
10251
10252  sprintf (string, "crnot %d,%d", a, b);
10253  return string;
10254}
10255
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.

   FP conditional moves are implemented via fsel, which selects on
   "operand >= 0"; the bulk of this function reduces the requested
   comparison to that form.  */

int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      /* Integer conditional moves only exist via isel.  */
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
	   && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	/* GT becomes "not LE" -- LE with the arms swapped.  */
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      /* fsel handles GE directly.  */
      break;

    case LE:
      /* a LE 0 <-> -a GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* a ORDERED <-> |a| GE 0 (|NaN| fails GE 0).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a EQ 0 <-> -|a| GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* Final fsel: DEST = (op0 >= 0) ? true_cond : false_cond.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
10425
10426/* Same as above, but for ints (isel).  */
10427
10428static int
10429rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10430{
10431  rtx condition_rtx, cr;
10432
10433  /* All isel implementations thus far are 32-bits.  */
10434  if (GET_MODE (rs6000_compare_op0) != SImode)
10435    return 0;
10436
10437  /* We still have to do the compare, because isel doesn't do a
10438     compare, it just looks at the CRx bits set by a previous compare
10439     instruction.  */
10440  condition_rtx = rs6000_generate_compare (GET_CODE (op));
10441  cr = XEXP (condition_rtx, 0);
10442
10443  if (GET_MODE (cr) == CCmode)
10444    emit_insn (gen_isel_signed (dest, condition_rtx,
10445				true_cond, false_cond, cr));
10446  else
10447    emit_insn (gen_isel_unsigned (dest, condition_rtx,
10448				  true_cond, false_cond, cr));
10449
10450  return 1;
10451}
10452
10453const char *
10454output_isel (rtx *operands)
10455{
10456  enum rtx_code code;
10457
10458  code = GET_CODE (operands[1]);
10459  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10460    {
10461      PUT_CODE (operands[1], reverse_condition (code));
10462      return "isel %0,%3,%2,%j1";
10463    }
10464  else
10465    return "isel %0,%2,%3,%j1";
10466}
10467
10468void
10469rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10470{
10471  enum machine_mode mode = GET_MODE (op0);
10472  enum rtx_code c;
10473  rtx target;
10474
10475  if (code == SMAX || code == SMIN)
10476    c = GE;
10477  else
10478    c = GEU;
10479
10480  if (code == SMAX || code == UMAX)
10481    target = emit_conditional_move (dest, c, op0, op1, mode,
10482				    op0, op1, mode, 0);
10483  else
10484    target = emit_conditional_move (dest, c, op0, op1, mode,
10485				    op1, op0, mode, 0);
10486  if (target == NULL_RTX)
10487    abort ();
10488  if (target != dest)
10489    emit_move_insn (dest, target);
10490}
10491
10492/* Emit instructions to move SRC to DST.  Called by splitters for
10493   multi-register moves.  It will emit at most one instruction for
10494   each register that is accessed; that is, it won't emit li/lis pairs
10495   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
10496   register.  */
10497
10498void
10499rs6000_split_multireg_move (rtx dst, rtx src)
10500{
10501  /* The register number of the first register being moved.  */
10502  int reg;
10503  /* The mode that is to be moved.  */
10504  enum machine_mode mode;
10505  /* The mode that the move is being done in, and its size.  */
10506  enum machine_mode reg_mode;
10507  int reg_mode_size;
10508  /* The number of registers that will be moved.  */
10509  int nregs;
10510
10511  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10512  mode = GET_MODE (dst);
10513  nregs = HARD_REGNO_NREGS (reg, mode);
10514  if (FP_REGNO_P (reg))
10515    reg_mode = DFmode;
10516  else if (ALTIVEC_REGNO_P (reg))
10517    reg_mode = V16QImode;
10518  else
10519    reg_mode = word_mode;
10520  reg_mode_size = GET_MODE_SIZE (reg_mode);
10521
10522  if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10523    abort ();
10524
10525  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10526    {
10527      /* Move register range backwards, if we might have destructive
10528	 overlap.  */
10529      int i;
10530      for (i = nregs - 1; i >= 0; i--)
10531	emit_insn (gen_rtx_SET (VOIDmode,
10532				simplify_gen_subreg (reg_mode, dst, mode,
10533						     i * reg_mode_size),
10534				simplify_gen_subreg (reg_mode, src, mode,
10535						     i * reg_mode_size)));
10536    }
10537  else
10538    {
10539      int i;
10540      int j = -1;
10541      bool used_update = false;
10542
10543      if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10544        {
10545          rtx breg;
10546
10547	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
10548	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10549	    {
10550	      rtx delta_rtx;
10551	      breg = XEXP (XEXP (src, 0), 0);
10552	      delta_rtx =  GET_CODE (XEXP (src, 0)) == PRE_INC
10553		  ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10554		  : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10555	      emit_insn (TARGET_32BIT
10556			 ? gen_addsi3 (breg, breg, delta_rtx)
10557			 : gen_adddi3 (breg, breg, delta_rtx));
10558	      src = gen_rtx_MEM (mode, breg);
10559	    }
10560	  else if (! offsettable_memref_p (src))
10561	    {
10562	      rtx newsrc, basereg;
10563	      basereg = gen_rtx_REG (Pmode, reg);
10564	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
10565	      newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
10566	      MEM_COPY_ATTRIBUTES (newsrc, src);
10567	      src = newsrc;
10568	    }
10569
10570	  breg = XEXP (src, 0);
10571	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
10572	    breg = XEXP (breg, 0);
10573
10574	  /* If the base register we are using to address memory is
10575	     also a destination reg, then change that register last.  */
10576	  if (REG_P (breg)
10577	      && REGNO (breg) >= REGNO (dst)
10578	      && REGNO (breg) < REGNO (dst) + nregs)
10579	    j = REGNO (breg) - REGNO (dst);
10580	}
10581
10582      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10583	{
10584	  rtx breg;
10585
10586	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10587	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10588	    {
10589	      rtx delta_rtx;
10590	      breg = XEXP (XEXP (dst, 0), 0);
10591	      delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10592		? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10593		: GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10594
10595	      /* We have to update the breg before doing the store.
10596		 Use store with update, if available.  */
10597
10598	      if (TARGET_UPDATE)
10599		{
10600		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10601		  emit_insn (TARGET_32BIT
10602			     ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10603			     : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10604		  used_update = true;
10605		}
10606	      else
10607		emit_insn (TARGET_32BIT
10608			   ? gen_addsi3 (breg, breg, delta_rtx)
10609			   : gen_adddi3 (breg, breg, delta_rtx));
10610	      dst = gen_rtx_MEM (mode, breg);
10611	    }
10612	  else if (! offsettable_memref_p (dst))
10613	    abort ();
10614	}
10615
10616      for (i = 0; i < nregs; i++)
10617	{
10618	  /* Calculate index to next subword.  */
10619	  ++j;
10620	  if (j == nregs)
10621	    j = 0;
10622
10623	  /* If compiler already emitted move of first word by
10624	     store with update, no need to do anything.  */
10625	  if (j == 0 && used_update)
10626	    continue;
10627
10628	  emit_insn (gen_rtx_SET (VOIDmode,
10629				  simplify_gen_subreg (reg_mode, dst, mode,
10630						       j * reg_mode_size),
10631				  simplify_gen_subreg (reg_mode, src, mode,
10632						       j * reg_mode_size)));
10633	}
10634    }
10635}
10636
10637
10638/* This page contains routines that are used to determine what the
10639   function prologue and epilogue code will do and write them out.  */
10640
10641/* Return the first fixed-point register that is required to be
10642   saved. 32 if none.  */
10643
10644int
10645first_reg_to_save (void)
10646{
10647  int first_reg;
10648
10649  /* Find lowest numbered live register.  */
10650  for (first_reg = 13; first_reg <= 31; first_reg++)
10651    if (regs_ever_live[first_reg]
10652	&& (! call_used_regs[first_reg]
10653	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10654		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10655		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
10656		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
10657      break;
10658
10659#if TARGET_MACHO
10660  if (flag_pic
10661      && current_function_uses_pic_offset_table
10662      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10663    return RS6000_PIC_OFFSET_TABLE_REGNUM;
10664#endif
10665
10666  return first_reg;
10667}
10668
10669/* Similar, for FP regs.  */
10670
10671int
10672first_fp_reg_to_save (void)
10673{
10674  int first_reg;
10675
10676  /* Find lowest numbered live register.  */
10677  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10678    if (regs_ever_live[first_reg])
10679      break;
10680
10681  return first_reg;
10682}
10683
10684/* Similar, for AltiVec regs.  */
10685
10686static int
10687first_altivec_reg_to_save (void)
10688{
10689  int i;
10690
10691  /* Stack frame remains as is unless we are in AltiVec ABI.  */
10692  if (! TARGET_ALTIVEC_ABI)
10693    return LAST_ALTIVEC_REGNO + 1;
10694
10695  /* Find lowest numbered live register.  */
10696  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10697    if (regs_ever_live[i])
10698      break;
10699
10700  return i;
10701}
10702
10703/* Return a 32-bit mask of the AltiVec registers we need to set in
10704   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
10705   the 32-bit word is 0.  */
10706
static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No live AltiVec registers: nothing needs to be set in VRSAVE.  */
  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): this assumes args_info.vregno is one past the last
     vector argument register consumed — confirm against the argument
     passing code.  The loop counts down and stops before
     ALTIVEC_ARG_MIN_REG; since I is unsigned, vregno must not be 0
     here or the initial subtraction wraps.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    /* is_altivec_return_reg sets YES if any return-value register is
       ALTIVEC_ARG_RETURN.  */
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
10738
10739static void
10740is_altivec_return_reg (rtx reg, void *xyes)
10741{
10742  bool *yes = (bool *) xyes;
10743  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10744    *yes = true;
10745}
10746
10747
10748/* Calculate the stack information for the current function.  This is
10749   complicated by having two separate calling sequences, the AIX calling
10750   sequence and the V.4 calling sequence.
10751
10752   AIX (and Darwin/Mac OS X) stack frames look like:
10753							  32-bit  64-bit
10754	SP---->	+---------------------------------------+
10755		| back chain to caller			| 0	  0
10756		+---------------------------------------+
10757		| saved CR				| 4       8 (8-11)
10758		+---------------------------------------+
10759		| saved LR				| 8       16
10760		+---------------------------------------+
10761		| reserved for compilers		| 12      24
10762		+---------------------------------------+
10763		| reserved for binders			| 16      32
10764		+---------------------------------------+
10765		| saved TOC pointer			| 20      40
10766		+---------------------------------------+
10767		| Parameter save area (P)		| 24      48
10768		+---------------------------------------+
10769		| Alloca space (A)			| 24+P    etc.
10770		+---------------------------------------+
10771		| Local variable space (L)		| 24+P+A
10772		+---------------------------------------+
10773		| Float/int conversion temporary (X)	| 24+P+A+L
10774		+---------------------------------------+
10775		| Save area for AltiVec registers (W)	| 24+P+A+L+X
10776		+---------------------------------------+
10777		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
10778		+---------------------------------------+
10779		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
10780		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
10784		+---------------------------------------+
10785	old SP->| back chain to caller's caller		|
10786		+---------------------------------------+
10787
10788   The required alignment for AIX configurations is two words (i.e., 8
10789   or 16 bytes).
10790
10791
10792   V.4 stack frames look like:
10793
10794	SP---->	+---------------------------------------+
10795		| back chain to caller			| 0
10796		+---------------------------------------+
10797		| caller's saved LR			| 4
10798		+---------------------------------------+
10799		| Parameter save area (P)		| 8
10800		+---------------------------------------+
10801		| Alloca space (A)			| 8+P
10802		+---------------------------------------+
10803		| Varargs save area (V)			| 8+P+A
10804		+---------------------------------------+
10805		| Local variable space (L)		| 8+P+A+V
10806		+---------------------------------------+
10807		| Float/int conversion temporary (X)	| 8+P+A+V+L
10808		+---------------------------------------+
10809		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
10810		+---------------------------------------+
10811		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
10812		+---------------------------------------+
10813		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
10814		+---------------------------------------+
10815                | SPE: area for 64-bit GP registers     |
10816                +---------------------------------------+
10817                | SPE alignment padding                 |
10818                +---------------------------------------+
10819		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
10820		+---------------------------------------+
10821		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
10822		+---------------------------------------+
10823		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
10824		+---------------------------------------+
10825	old SP->| back chain to caller's caller		|
10826		+---------------------------------------+
10827
10828   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10829   given.  (But note below and in sysv4.h that we require only 8 and
10830   may round up the size of our stack frame anyways.  The historical
10831   reason is early versions of powerpc-linux which didn't properly
10832   align the stack at program startup.  A happy side-effect is that
10833   -mno-eabi libraries can be used with -meabi programs.)
10834
10835   The EABI configuration defaults to the V.4 layout.  However,
10836   the stack alignment requirements may differ.  If -mno-eabi is not
10837   given, the required stack alignment is 8 bytes; if -mno-eabi is
10838   given, the required alignment is 16 bytes.  (But see V.4 comment
10839   above.)  */
10840
10841#ifndef ABI_STACK_BOUNDARY
10842#define ABI_STACK_BOUNDARY STACK_BOUNDARY
10843#endif
10844
/* Compute the current function's stack-frame layout (see the frame
   diagrams above) and return a pointer to it.  The result lives in
   function-local static storage and is recomputed from scratch on
   every call.  Offsets in the result are relative to the stack
   pointer after the frame is allocated (negative values are below
   the incoming SP for AIX-style frames).  */

static rs6000_stack_t *
rs6000_stack_info (void)
{
  static rs6000_stack_t info, zero_info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_32BIT ? 4 : 8;	/* bytes per GPR save slot */
  int ehrd_size;			/* size of EH return-data area */
  int save_align;
  HOST_WIDE_INT non_fixed_size;

  /* Zero all fields portably.  */
  info = zero_info;

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      if (cfun->machine->insn_chain_scanned_p == 0)
	{
	  cfun->machine->insn_chain_scanned_p = 1;
	  info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
	}
    }

  /* Select which calling sequence.  */
  info_ptr->abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always if but one register
     gets used in 64-bits.  Otherwise, all the registers in the frame
     get saved in 32-bits.

     So... since when we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if (rs6000_ra_ever_killed ()
      || (DEFAULT_ABI == ABI_AIX
	  && current_function_profile
	  && !TARGET_PROFILE_KERNEL)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
      || (DEFAULT_ABI == ABI_DARWIN
	  && flag_pic
	  && current_function_uses_pic_offset_table)
      || info_ptr->calls_p)
    {
      info_ptr->lr_save_p = 1;
      /* Mark LR live so later passes know it needs saving.  */
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      /* Only V.4 reserves a separate slot; AIX-style frames keep CR
	 in the fixed area.  */
      if (DEFAULT_ABI == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH return-data registers.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
		       && info_ptr->spe_64bit_regs_used != 0
		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->varargs_size = RS6000_VARARGS_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
					 TARGET_ALTIVEC ? 16 : 8);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size  = 4;
  else
    info_ptr->vrsave_size  = 0;

  /* Calculate the offsets.  */
  switch (DEFAULT_ABI)
    {
    case ABI_NONE:
    default:
      abort ();

    case ABI_AIX:
    case ABI_DARWIN:
      /* FP saves sit at the top of the frame, GP saves below them.  */
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset   = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
      {
        /* Align stack so SPE GPR save area is aligned on a
           double-word boundary.  */
        if (info_ptr->spe_gp_size != 0)
          info_ptr->spe_padding_size
            = 8 - (-info_ptr->cr_save_offset % 8);
        else
          info_ptr->spe_padding_size = 0;

        info_ptr->spe_gp_save_offset
          = info_ptr->cr_save_offset
          - info_ptr->spe_padding_size
          - info_ptr->spe_gp_size;

        /* Adjust for SPE case.  */
        info_ptr->toc_save_offset
          = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
      }
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
	}
      else
	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
      info_ptr->lr_save_offset   = reg_size;
      break;
    }

  /* Total up the register-save areas and round to the required
     alignment.  */
  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
					 + info_ptr->gp_size
					 + info_ptr->altivec_size
					 + info_ptr->altivec_padding_size
					 + info_ptr->spe_gp_size
					 + info_ptr->spe_padding_size
					 + ehrd_size
					 + info_ptr->cr_size
					 + info_ptr->lr_size
					 + info_ptr->vrsave_size
					 + info_ptr->toc_size,
					 save_align);

  non_fixed_size	 = (info_ptr->vars_size
			    + info_ptr->parm_size
			    + info_ptr->save_size
			    + info_ptr->varargs_size);

  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (DEFAULT_ABI == ABI_V4)
    info_ptr->push_p = non_fixed_size != 0;

  else if (frame_pointer_needed)
    info_ptr->push_p = 1;

  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
    info_ptr->push_p = 1;

  else
    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  if (! info_ptr->toc_save_p)
    info_ptr->toc_save_offset = 0;

  return info_ptr;
}
11148
11149/* Return true if the current function uses any GPRs in 64-bit SIMD
11150   mode.  */
11151
11152static bool
11153spe_func_has_64bit_regs_p (void)
11154{
11155  rtx insns, insn;
11156
11157  /* Functions that save and restore all the call-saved registers will
11158     need to save/restore the registers in 64-bits.  */
11159  if (current_function_calls_eh_return
11160      || current_function_calls_setjmp
11161      || current_function_has_nonlocal_goto)
11162    return true;
11163
11164  insns = get_insns ();
11165
11166  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11167    {
11168      if (INSN_P (insn))
11169	{
11170	  rtx i;
11171
11172	  i = PATTERN (insn);
11173	  if (GET_CODE (i) == SET
11174	      && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11175	    return true;
11176	}
11177    }
11178
11179  return false;
11180}
11181
/* Dump the fields of INFO to stderr for debugging.  If INFO is null,
   the current function's stack info is computed first.  Fields that
   are zero (or at their uninteresting default) are omitted from the
   dump.  */

static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
11327
/* Return an rtx for the return address COUNT frames up from the
   current one (COUNT == 0 means the current frame's own return
   address).  FRAME is the frame address to start from.  */

rtx
rs6000_return_addr (int count, rtx frame)
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
    {
      /* Force a full frame so the saved LR slot actually exists.  */
      cfun->machine->ra_needs_full_frame = 1;

      /* Load *FRAME (the back chain) into a register, then read the
	 return address at RETURN_ADDRESS_OFFSET from it.  */
      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	   (Pmode,
	    plus_constant (copy_to_reg
			   (gen_rtx_MEM (Pmode,
					 memory_address (Pmode, frame))),
			   RETURN_ADDRESS_OFFSET)));
    }

  /* Simple case: use the value LR had on entry to the function.  */
  cfun->machine->ra_need_lr = 1;
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
11352
11353/* Say whether a function is a candidate for sibcall handling or not.
11354   We do not allow indirect calls to be optimized into sibling calls.
11355   Also, we can't do it if there are any vector parameters; there's
11356   nowhere to put the VRsave code so it works; note that functions with
11357   vector parameters are required to have a prototype, so the argument
11358   type info must be available here.  (The tail recursion case can work
11359   with vector parameters, but there's no way to distinguish here.) */
11360static bool
11361rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11362{
11363  tree type;
11364  if (decl)
11365    {
11366      if (TARGET_ALTIVEC_VRSAVE)
11367        {
11368	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11369	       type; type = TREE_CHAIN (type))
11370	    {
11371	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11372		return false;
11373	    }
11374        }
11375      if (DEFAULT_ABI == ABI_DARWIN
11376	  || (*targetm.binds_local_p) (decl))
11377	{
11378	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11379
11380	  if (!lookup_attribute ("longcall", attr_list)
11381	      || lookup_attribute ("shortcall", attr_list))
11382	    return true;
11383	}
11384    }
11385  return false;
11386}
11387
/* Return nonzero if the link register may be clobbered by the body of
   the current function (excluding prologue/epilogue bookkeeping), so
   that it must be saved across the function.  */

static int
rs6000_ra_ever_killed (void)
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Thunks never need LR saved by this path.  */
  if (current_function_is_thunk)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  /* Scan the outermost insn sequence, even if we are currently inside
     a nested sequence.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
    	}
    }
  return 0;
}
11435
11436/* Add a REG_MAYBE_DEAD note to the insn.  */
11437static void
11438rs6000_maybe_dead (rtx insn)
11439{
11440  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11441					const0_rtx,
11442					REG_NOTES (insn));
11443}
11444
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue; in
   that case hard registers (LR, r0) serve as temporaries instead of
   pseudos, and every emitted insn gets a REG_MAYBE_DEAD note so flow
   may delete the sequence if the TOC register is never used.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  /* DEST is the TOC/PIC base register being initialized.  */
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      /* SVR4 -fpic: one pattern computes the GOT address into TEMP,
	 then we copy it into DEST.  */
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      /* ELF -fPIC: compute the TOC base as "current address plus the
	 distance to the TOC", using a bcl-style pattern to read the
	 program counter into TEMPLR and an add to finish in DEST.  */
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  /* In the prologue we can use local labels (LCF/LCL) whose
	     difference is the PC-to-TOC displacement.  */
	  rtx symF, symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Outside the prologue the displacement is loaded indirectly
	     through the TOC label itself.  */
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      /* Materialize the absolute address of the LCTOC1 label with a
	 lis/addi (elf_high/elf_low) pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: a single pattern reloads the TOC pointer from its
	 dedicated save slot.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
11541
/* Emit instructions to restore the link register after determining where
   its value has been stored.

   SOURCE holds the value to restore and SCRATCH is a register we may
   clobber.  If the prologue saved LR on the stack (lr_save_p), write
   SOURCE into that save slot so the later reload of LR picks it up;
   otherwise move SOURCE directly into LR.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      /* When the save slot is not addressable with a 16-bit offset
	 from sp, load the frame base from 0(sp) (the back chain word)
	 into the scratch register and address relative to that.  */
      if (frame_pointer_needed
	  || current_function_calls_alloca
	  || info->total_size > 32767)
	{
	  emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
	  frame_rtx = operands[1];
	}
      else if (info->push_p)
	sp_offset = info->total_size;

      /* Store SOURCE into the LR save slot.  */
      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_rtx_MEM (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
}
11577
/* Cached alias set for TOC references, created lazily; -1 means "not
   yet created".  NOTE(review): GTY(()) presumably keeps the cached
   value valid across precompiled-header save/restore — confirm against
   the generated gt-rs6000.h roots.  */
static GTY(()) int set = -1;

/* Return the alias set used for TOC references, creating it on
   first use.  */
int
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}
11587
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx body;
      int len, ix;

      if (!INSN_P (insn))
	continue;

      /* Only the PARALLELs emitted by the load_toc_* patterns can
	 carry the marker.  */
      body = PATTERN (insn);
      if (GET_CODE (body) != PARALLEL)
	continue;

      len = XVECLEN (body, 0);
      for (ix = 0; ix < len; ix++)
	{
	  rtx elt = XVECEXP (body, 0, ix);

	  if (GET_CODE (elt) != USE)
	    continue;
	  elt = XEXP (elt, 0);
	  if (GET_CODE (elt) == UNSPEC && XINT (elt, 1) == UNSPEC_TOC)
	    return 1;
	}
    }
  return 0;
}
#endif
11619
11620rtx
11621create_TOC_reference (rtx symbol)
11622{
11623  return gen_rtx_PLUS (Pmode,
11624	   gen_rtx_REG (Pmode, TOC_REGISTER),
11625	     gen_rtx_CONST (Pmode,
11626	       gen_rtx_MINUS (Pmode, symbol,
11627		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11628}
11629
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.

   This works by inspecting the instruction at the caller's return
   address: if it is the conventional TOC reload (lwz r2,20(r1) on
   32-bit, ld r2,40(r1) on 64-bit — see the opcode constants below),
   the caller will restore r2 itself and no save is needed; otherwise
   store r2 into the caller frame's TOC save slot.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Word 2 of the caller's frame holds the saved LR, i.e. the return
     address; fetch the instruction located there.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  /* 0x80410014 encodes "lwz r2,20(r1)"; 0xE8410028 encodes
     "ld r2,40(r1)" — the post-call TOC restore instructions.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  /* No TOC reload at the return site: save r2 into word 5 of the
     caller's frame (20(1) on 32-bit, 40(1) on 64-bit).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
11665
11666/* This ties together stack memory (MEM with an alias set of
11667   rs6000_sr_alias_set) and the change to the stack pointer.  */
11668
11669static void
11670rs6000_emit_stack_tie (void)
11671{
11672  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11673
11674  set_mem_alias_set (mem, rs6000_sr_alias_set);
11675  emit_insn (gen_stack_tie (mem));
11676}
11677
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to allocate.  When -fstack-limit is in
   effect a conditional trap is emitted first; the allocation itself
   uses a store-with-update (decrement sp and store the back chain in
   one insn) when TARGET_UPDATE, else an add followed by an explicit
   back-chain store from r12.  A REG_FRAME_RELATED_EXPR note records
   the net sp adjustment for the unwinder.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      /* Trap if the new stack pointer would fall below the limit.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit lives in a register: compute limit+size into r0 and
	     compare against the (not yet decremented) sp.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol+size with a
	     lis/addi (elf_high/elf_low) pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update, r12 is always needed below to store
     the back chain after the add.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement doesn't fit in a 16-bit immediate; load it
	     into r0 (splitting the load now).  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      /* Store the old sp (saved in r12 above) as the back chain.  */
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the sp adjustment for the DWARF unwinder, independent of
     how it was actually performed.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
11763
11764/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11765   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11766   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
11767   deduce these equivalences by itself so it wasn't necessary to hold
11768   its hand so much.  */
11769
11770static void
11771rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11772		      rtx reg2, rtx rreg)
11773{
11774  rtx real, temp;
11775
11776  /* copy_rtx will not make unique copies of registers, so we need to
11777     ensure we don't have unwanted sharing here.  */
11778  if (reg == reg2)
11779    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11780
11781  if (reg == rreg)
11782    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11783
11784  real = copy_rtx (PATTERN (insn));
11785
11786  if (reg2 != NULL_RTX)
11787    real = replace_rtx (real, reg2, rreg);
11788
11789  real = replace_rtx (real, reg,
11790		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11791							STACK_POINTER_REGNUM),
11792				    GEN_INT (val)));
11793
11794  /* We expect that 'real' is either a SET or a PARALLEL containing
11795     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
11796     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
11797
11798  if (GET_CODE (real) == SET)
11799    {
11800      rtx set = real;
11801
11802      temp = simplify_rtx (SET_SRC (set));
11803      if (temp)
11804	SET_SRC (set) = temp;
11805      temp = simplify_rtx (SET_DEST (set));
11806      if (temp)
11807	SET_DEST (set) = temp;
11808      if (GET_CODE (SET_DEST (set)) == MEM)
11809	{
11810	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11811	  if (temp)
11812	    XEXP (SET_DEST (set), 0) = temp;
11813	}
11814    }
11815  else if (GET_CODE (real) == PARALLEL)
11816    {
11817      int i;
11818      for (i = 0; i < XVECLEN (real, 0); i++)
11819	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11820	  {
11821	    rtx set = XVECEXP (real, 0, i);
11822
11823	    temp = simplify_rtx (SET_SRC (set));
11824	    if (temp)
11825	      SET_SRC (set) = temp;
11826	    temp = simplify_rtx (SET_DEST (set));
11827	    if (temp)
11828	      SET_DEST (set) = temp;
11829	    if (GET_CODE (SET_DEST (set)) == MEM)
11830	      {
11831		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11832		if (temp)
11833		  XEXP (SET_DEST (set), 0) = temp;
11834	      }
11835	    RTX_FRAME_RELATED_P (set) = 1;
11836	  }
11837    }
11838  else
11839    abort ();
11840
11841  if (TARGET_SPE)
11842    real = spe_synthesize_frame_save (real);
11843
11844  RTX_FRAME_RELATED_P (insn) = 1;
11845  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11846					real,
11847					REG_NOTES (insn));
11848}
11849
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.

   REAL must be a (set (mem ...) (reg ...)); anything else (including
   non-V2SImode sources) is returned unchanged.  The result describes
   the 64-bit save as two 32-bit word saves, with word order chosen by
   endianness.  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* REAL2: the same save narrowed to SImode (low word on
     little-endian, high word shifted by 4 on big-endian).  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  /* SYNTH: the companion word, attributed to synthetic register
     z+1200 so the unwinder recognizes a 64-bit save.  */
  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
11912
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.

   REG holds the new VRSAVE value; INFO supplies the mask of live
   AltiVec registers; EPILOGUEP is nonzero when called from the
   epilogue (call-saved registers then get a use/set instead of a
   clobber, see below).  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  /* The VRSAVE update itself.  NOTE(review): unspec_volatile number
     30 presumably matches the set-vrsave pattern in rs6000.md /
     altivec.md — confirm.  */
  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    30));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

     	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs [i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    /* Call-saved register in the epilogue: emit a no-op
	       use/set (unspec 27) instead of a clobber, per the
	       comment above.  */
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  /* Package the set and all clobbers into one PARALLEL.  */
  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
11971
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.

   FRAME_PTR and TOTAL_SIZE are forwarded to rs6000_frame_related for
   the unwind note.  When register-indexed addressing is required, the
   note must still show the constant offset, so the scratch register
   (REPLACEA) is mapped back to the constant (REPLACEB) there.  */

static void
emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
         flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
12014
12015/* Emit an offset memory reference suitable for a frame store, while
12016   converting to a valid addressing mode.  */
12017
12018static rtx
12019gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12020{
12021  rtx int_rtx, offset_rtx;
12022
12023  int_rtx = GEN_INT (offset);
12024
12025  if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12026    {
12027      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12028      emit_move_insn (offset_rtx, int_rtx);
12029    }
12030  else
12031    offset_rtx = int_rtx;
12032
12033  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12034}
12035
/* Emit function prologue as insns.

   Allocate the stack frame, save the call-saved registers this
   function uses (AltiVec, VRSAVE, FPRs, GPRs, CR, LR), set up the
   frame pointer and the TOC/PIC base register where required, and
   attach RTX_FRAME_RELATED_P notes so DWARF unwind information can
   be generated.  */

void
rs6000_emit_prologue (void)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL_RTX;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* SPE saves 64-bit GPRs as V2SImode values, 8 bytes each.  */
   if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
     {
       reg_mode = V2SImode;
       reg_size = 8;
     }

  /* Store-multiple is 32-bit only, cannot handle SPE 64-bit saves,
     and only pays off when at least two GPRs are saved.  */
  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  /* Save FPRs inline when there are none to save, when the count
     makes inline saves preferable, or when the out-of-line save
     routine (which clobbers LR, see below) cannot be used.  */
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save)
			|| current_function_calls_eh_return
			|| cfun->machine->ra_need_lr);

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p
      && (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return))
    {
      /* If the frame is too big for 16-bit offsets from the new sp,
	 keep the old sp in r12 and address saves relative to it.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 carries the offset for the [reg+reg] address.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
	 as frame_reg_rtx and r11 as the static chain pointer for
	 nested functions.  */
      reg = gen_rtx_REG (SImode, 0);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line FPR save: emit a PARALLEL of all the stores plus
	 a USE of the _savefN routine's symbol and a CLOBBER of LR.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Individual stores: save each live call-saved GPR, and also
	 the PIC/TOC register when this function needs it.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && (! call_used_regs[info->first_gp_reg_save+i]
		 || (i+info->first_gp_reg_save
		     == RS6000_PIC_OFFSET_TABLE_REGNUM
		     && TARGET_TOC && TARGET_MINIMAL_TOC)))
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		/* SPE 64-bit save; large offsets need the fixed
		   scratch register for the address.  */
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
	{
	  rtx addr, reg, mem;

	  reg = gen_rtx_REG (reg_mode, 2);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (sp_offset + 5 * reg_size));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	  /* Replace the store with a blockage so no code is emitted
	     for it while the frame-related note remains.  */
	  PATTERN (insn) = gen_blockage ();
	}

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      /* r0 holds LR, copied there above.  */
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
	 But that's OK.  All we have to do is specify that _one_ condition
	 code register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R12 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF
				    && DEFAULT_ABI != ABI_AIX
				    && flag_pic
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      {
	rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
	rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
	rs6000_emit_load_toc_table (TRUE);
	rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
      }
    else
      rs6000_emit_load_toc_table (TRUE);
  }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      /* Darwin PIC: load the picbase address into LR, then copy it
	 into the PIC offset table register.  */
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
12433
12434/* Write function prologue.  */
12435
static void
rs6000_output_function_prologue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* common_mode_defined is file-scope state, so the .externs above
	 are emitted at most once per translation unit.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      /* No "prologue" insn pattern exists for this configuration:
	 generate the prologue RTL in a temporary sequence and run
	 final () over it right here, writing the assembly directly.  */
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  The addresses
	 need not be accurate (4 bytes per insn is assumed); they only
	 have to exist.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
12495
12496/* Emit function epilogue as insns.
12497
12498   At present, dwarf2out_frame_debug_expr doesn't understand
12499   register restores, so we don't bother setting RTX_FRAME_RELATED_P
12500   anywhere in the epilogue.  Most of the insns below would in any case
12501   need special notes to explain where r11 is in relation to the stack.  */
12502
void
rs6000_emit_epilogue (int sibcall)
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);	/* r1 is the stack pointer.  */
  rtx frame_reg_rtx = sp_reg_rtx;	/* Register used to address the frame;
					   may be switched to r11 below.  */
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int i;

  info = rs6000_stack_info ();

  /* SPE saves 64-bit GPRs as V2SI pairs regardless of -m32.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  /* lmw is 32-bit only and only worthwhile when at least two GPRs
     must be restored (first_gp_reg_save < 31); it cannot be used for
     SPE 64-bit register restores.  */
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* Load the caller's stack pointer from the backchain word at
	 0(r1).  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      /* For V.4 and eh_return the pop is deferred to the bottom of the
	 function; record the offset so intervening loads address the
	 not-yet-popped frame.  Otherwise pop the frame now by adding
	 its size back to r1.  */
      if (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 is free here and serves as the index register.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  r12 is used as the intermediary since
     mtvrsave takes a GPR operand.  */
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first; the move to
     the link register proper is delayed (see below) to overlap with
     other restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  It is staged in r12; the actual
     mtcrf happens further down once we know which fields to restore.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      if (TARGET_AIX)
	{
	  /* Reload the TOC pointer (r2) from its AIX ABI slot.  */
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (sp_offset + 5 * reg_size));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
	}

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Individual loads: restore only registers that were actually
       saved, including the PIC register when this ABI uses one.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && (! call_used_regs[info->first_gp_reg_save+i]
	       || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* evldd has a limited displacement range; fall back to a
		 register offset held in the fixed scratch register.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.
     The saved CR image is in r12 (loaded above).  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One mtcrf with a multi-field mask, expressed as a PARALLEL
	     of per-field UNSPEC sets.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Otherwise emit one single-field mtcrf per live CR field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4
      || current_function_calls_eh_return)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* For eh_return, additionally bump sp by the dynamic stack
     adjustment computed by the unwinder.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself, unless this epilogue precedes a sibling
     call (which supplies its own control transfer).  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Record the FPR restores performed by the out-of-line routine
	     so dataflow knows these registers are set here.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
12883
12884/* Write function epilogue.  */
12885
static void
rs6000_output_function_epilogue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

#if TARGET_MACHO
  macho_branch_islands ();
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
	   && NOTE_P (insn)
	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
	&& (LABEL_P (insn)
	    || (NOTE_P (insn)
		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether to emit the optional (variable-length) portion
	 of the table: forced on/off by -mtraceback=, otherwise emitted
	 unless optimizing for size or targeting ELF.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there does not seem to be any
	 official way to discover the language being compiled, so we
	 use language_string.
	 C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
	 Java is 13.  Objective-C is 14.  */
      if (! strcmp (language_string, "GNU C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU Objective-C"))
	i = 14;
      else
	/* NOTE(review): any front end not listed above (e.g. a newer
	   Fortran) aborts the compiler here — confirm the list covers
	   all configured languages.  */
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
13172
13173/* A C compound statement that outputs the assembler code for a thunk
13174   function, used to implement C++ virtual function calls with
13175   multiple inheritance.  The thunk acts as a wrapper around a virtual
13176   function, adjusting the implicit object parameter before handing
13177   control off to the real function.
13178
13179   First, emit code to add the integer DELTA to the location that
13180   contains the incoming first argument.  Assume that this argument
13181   contains a pointer, and is the one used to pass the `this' pointer
13182   in C++.  This is the incoming argument *before* the function
13183   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
13184   values of all other incoming arguments.
13185
13186   After the addition, emit code to jump to FUNCTION, which is a
13187   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
13188   not touch the return address.  Hence returning from FUNCTION will
13189   return to whoever called the current `thunk'.
13190
13191   The effect must be as if FUNCTION had been called directly with the
13192   adjusted first argument.  This macro is responsible for emitting
13193   all of the code for a thunk function; output_function_prologue()
13194   and output_function_epilogue() are not invoked.
13195
13196   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
13197   been extracted from it.)  It might possibly be useful on some
13198   targets, but probably not.
13199
13200   If you do not define this macro, the target-independent code in the
13201   C++ frontend will generate a less efficient heavyweight thunk that
13202   calls FUNCTION instead of jumping to it.  The generic approach does
13203   not support varargs.  */
13204
static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			tree function)
{
  /* `this' is a plain identifier here — this file is compiled as C.  */
  rtx this, insn, funexp;

  /* Pretend register allocation and the epilogue have already run, so
     that the RTL emitted below uses hard registers directly.  These
     globals are restored at the end of the function.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);	/* r12 is free as scratch here.  */

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      /* If the offset doesn't fit in a 16-bit signed displacement,
	 add it in a separate insn before dereferencing.  */
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx))
	  ;
	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
	}
      else
	{
	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
	}
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the compiler-state flags forced above.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
13304
13305/* A quick summary of the various types of 'constant-pool tables'
13306   under PowerPC:
13307
13308   Target	Flags		Name		One table per
13309   AIX		(none)		AIX TOC		object file
13310   AIX		-mfull-toc	AIX TOC		object file
13311   AIX		-mminimal-toc	AIX minimal TOC	translation unit
13312   SVR4/EABI	(none)		SVR4 SDATA	object file
13313   SVR4/EABI	-fpic		SVR4 pic	object file
13314   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
13315   SVR4/EABI	-mrelocatable	EABI TOC	function
13316   SVR4/EABI	-maix		AIX TOC		object file
13317   SVR4/EABI	-maix -mminimal-toc
13318				AIX minimal TOC	translation unit
13319
13320   Name			Reg.	Set by	entries	      contains:
13321					made by	 addrs?	fp?	sum?
13322
13323   AIX TOC		2	crt0	as	 Y	option	option
13324   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
13325   SVR4 SDATA		13	crt0	gcc	 N	Y	N
13326   SVR4 pic		30	prolog	ld	 Y	not yet	N
13327   SVR4 PIC		30	prolog	gcc	 Y	option	option
13328   EABI TOC		30	prolog	gcc	 Y	option	option
13329
13330*/
13331
13332/* Hash functions for the hash table.  */
13333
static unsigned
rs6000_hash_constant (rtx k)
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  /* Seed the hash with the rtx code and mode so equal bit patterns in
     different modes hash differently.  */
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  /* Adjust which operand fields are hashed for codes whose fields are
     not all meaningful as constant-pool contents.  */
  switch (code)
    {
    case LABEL_REF:
      /* Hash the UID of the insn the label refers to.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	/* FP constant: hash the host-independent REAL_VALUE.  */
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* A VOIDmode CONST_DOUBLE is a wide integer: hash only its first
	 two (value-carrying) fields.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Start at field 3, skipping the leading per-insn fields
	 (presumably insn-chain links/uid — TODO(review): confirm
	 the CODE_LABEL field layout).  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Fold each remaining operand field into the hash according to its
     rtx format character.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  /* String operand: hash the length, then every byte.  */
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Sub-expression: hash recursively.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    /* HOST_WIDE_INT is wider than unsigned: mix in several
	       shifted views of the value.  NOTE(review): the shift step
	       is CHAR_BIT rather than the bit width of unsigned, so the
	       high bits are only partially mixed — harmless for a hash.  */
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':
	/* Unused field.  */
	break;
      default:
	abort ();
      }

  return result;
}
13406
13407static unsigned
13408toc_hash_function (const void *hash_entry)
13409{
13410  const struct toc_hash_struct *thc =
13411    (const struct toc_hash_struct *) hash_entry;
13412  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13413}
13414
13415/* Compare H1 and H2 for equivalence.  */
13416
13417static int
13418toc_hash_eq (const void *h1, const void *h2)
13419{
13420  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13421  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13422
13423  if (((const struct toc_hash_struct *) h1)->key_mode
13424      != ((const struct toc_hash_struct *) h2)->key_mode)
13425    return 0;
13426
13427  return rtx_equal_p (r1, r2);
13428}
13429
13430/* These are the names given by the C++ front-end to vtables, and
13431   vtable-like objects.  Ideally, this logic should not be here;
13432   instead, there should be some programmatic way of inquiring as
13433   to whether or not an object is a vtable.  */
13434
/* Test whether the symbol string NAME names a C++ vtable ("_vt.",
   "_ZTV"), VTT ("_ZTT"), typeinfo ("_ZTI"), or construction vtable
   ("_ZTC").  The body now uses the macro argument NAME; the previous
   version referenced a lowercase `name' and silently captured a local
   variable at every expansion site.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
13441
13442void
13443rs6000_output_symbol_ref (FILE *file, rtx x)
13444{
13445  /* Currently C++ toc references to vtables can be emitted before it
13446     is decided whether the vtable is public or private.  If this is
13447     the case, then the linker will eventually complain that there is
13448     a reference to an unknown section.  Thus, for vtables only,
13449     we emit the TOC reference to reference the symbol and not the
13450     section.  */
13451  const char *name = XSTR (x, 0);
13452
13453  if (VTABLE_NAME_P (name))
13454    {
13455      RS6000_OUTPUT_BASENAME (file, name);
13456    }
13457  else
13458    assemble_name (file, name);
13459}
13460
13461/* Output a TOC entry.  We derive the entry name from what is being
13462   written.  */
13463
void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  /* Emit ".set LCnew,LCold" aliasing this entry's label to the
	     previously emitted identical entry, and emit nothing else.  */
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* Two 64-bit doublewords; the FT_ name encodes the bits so
	     identical constants get identical TOC entry names.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  /* Four 32-bit words.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one word (zero-extended to a doubleword on
	 64-bit targets).  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into the low 32 bits and the high part
	 regardless of host word size.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend the high word from bit 31 of LOW.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Big-endian padding: shift the value into the most
	     significant bits of the pointer-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* DImode constant on a 32-bit target: two words.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      /* Fits in a single 32-bit word.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Not a recognized constant: X is a symbol/label, possibly inside a
     CONST plus an offset.  Peel off the offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the offset in the TOC entry name: ".Nn" for negative,
	 ".Pn" for positive offsets.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
13755
13756/* Output an assembler pseudo-op to write an ASCII string of N characters
13757   starting at P to FILE.
13758
13759   On the RS/6000, we have to do this using the .byte operation and
13760   write out special characters outside the quoted string.
13761   Also, the assembler is broken; very long strings are truncated,
13762   so we must artificially break them up early.  */
13763
void
output_ascii (FILE *file, const char *p, int n)
{
  /* Prefix to emit before the next printable character, if a quoted
     string is not currently open.  */
  const char *open_string = "\t.byte \"";
  /* Prefix to emit before the next non-printable (decimal) byte.  */
  const char *open_decimal = "\t.byte ";
  /* Text that terminates the current line, if any.  */
  const char *pending_close = NULL;
  int nquoted = 0;		/* characters in the open quoted run */
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = p[i];

      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable: open (or continue) a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* A quote inside the string is written twice to get one.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      nquoted++;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  nquoted++;

	  /* The assembler truncates very long strings, so break them
	     up well before that happens.  */
	  if (nquoted >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      nquoted = 0;
	    }
	}
      else
	{
	  /* Non-printable: close any string and emit a decimal value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  nquoted = 0;
	}
    }

  /* Close the string if one is open, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
13822
13823/* Generate a unique section name for FILENAME for a section type
13824   represented by SECTION_DESC.  Output goes into BUF.
13825
13826   SECTION_DESC can be any string, as long as it is different for each
13827   possible section type.
13828
13829   We name the section in the same manner as xlc.  The name begins with an
13830   underscore followed by the filename (after stripping any leading directory
13831   names) with the last period replaced by the string SECTION_DESC.  If
13832   FILENAME does not contain a period, SECTION_DESC is appended to the end of
13833   the name.  */
13834
void
rs6000_gen_section_name (char **buf, const char *filename,
		         const char *section_desc)
{
  const char *scan, *base_name, *dot;
  char *out;
  int len;

  /* Find the basename (text after the last '/') and the position of
     the last '.' anywhere in FILENAME.  */
  base_name = filename;
  dot = 0;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base_name = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* Room for the leading '_', the basename, the descriptor and NUL.  */
  len = strlen (base_name) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumeric characters of the basename, substituting
     SECTION_DESC at the last period.  */
  for (scan = base_name; *scan; scan++)
    {
      if (scan == dot)
        {
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
        }

      else if (ISALNUM (*scan))
        *out++ = *scan;
    }

  /* No period at all: append SECTION_DESC at the end.  */
  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
13876
13877/* Emit profile function.  */
13878
void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Kernel profiling is emitted as text by output_function_profiler
     instead of as RTL here.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  /* Pass the address of this function's counter label ("LPn")
	     as the single argument to mcount.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
13929
13930/* Write function profiler code.  */
13931
void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];
  /* Stack offset at which the link register is saved; the default 8
     is overridden to 4 for the 32-bit V.4 ABI.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the counter label's address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: materialize the label address PC-relatively via a
	     bl/mflr sequence around an inline .long.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");

      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  if (TARGET_32BIT)
	    abort ();

	  /* Save LR in the standard 64-bit slot before calling mcount.  */
	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
14022
14023
/* All rs6000 pipeline descriptions use the DFA scheduler interface.  */

static int
rs6000_use_dfa_pipeline_interface (void)
{
  return 1;
}
14029
14030/* Power4 load update and store update instructions are cracked into a
14031   load or store and an integer insn which are executed in the same cycle.
14032   Branches have their own dispatch slot which does not count against the
14033   GCC issue rate, but it changes the program flow so there are no other
14034   instructions to issue in this cycle.  */
14035
14036static int
14037rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14038		       int verbose ATTRIBUTE_UNUSED,
14039		       rtx insn, int more)
14040{
14041  if (GET_CODE (PATTERN (insn)) == USE
14042      || GET_CODE (PATTERN (insn)) == CLOBBER)
14043    return more;
14044
14045  if (rs6000_sched_groups)
14046    {
14047      if (is_microcoded_insn (insn))
14048        return 0;
14049      else if (is_cracked_insn (insn))
14050        return more > 2 ? more - 2 : 0;
14051    }
14052
14053  return more - 1;
14054}
14055
14056/* Adjust the cost of a scheduling dependency.  Return the new cost of
14057   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
14058
14059static int
14060rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14061		    int cost)
14062{
14063  if (! recog_memoized (insn))
14064    return 0;
14065
14066  if (REG_NOTE_KIND (link) != 0)
14067    return 0;
14068
14069  if (REG_NOTE_KIND (link) == 0)
14070    {
14071      /* Data dependency; DEP_INSN writes a register that INSN reads
14072	 some cycles later.  */
14073      switch (get_attr_type (insn))
14074	{
14075	case TYPE_JMPREG:
14076	  /* Tell the first scheduling pass about the latency between
14077	     a mtctr and bctr (and mtlr and br/blr).  The first
14078	     scheduling pass will not know about this latency since
14079	     the mtctr instruction, which has the latency associated
14080	     to it, will be generated by reload.  */
14081	  return TARGET_POWER ? 5 : 4;
14082	case TYPE_BRANCH:
14083	  /* Leave some extra cycles between a compare and its
14084	     dependent branch, to inhibit expensive mispredicts.  */
14085	  if ((rs6000_cpu_attr == CPU_PPC603
14086	       || rs6000_cpu_attr == CPU_PPC604
14087	       || rs6000_cpu_attr == CPU_PPC604E
14088	       || rs6000_cpu_attr == CPU_PPC620
14089	       || rs6000_cpu_attr == CPU_PPC630
14090	       || rs6000_cpu_attr == CPU_PPC750
14091	       || rs6000_cpu_attr == CPU_PPC7400
14092	       || rs6000_cpu_attr == CPU_PPC7450
14093	       || rs6000_cpu_attr == CPU_POWER4
14094	       || rs6000_cpu_attr == CPU_POWER5)
14095	      && recog_memoized (dep_insn)
14096	      && (INSN_CODE (dep_insn) >= 0)
14097	      && (get_attr_type (dep_insn) == TYPE_CMP
14098		  || get_attr_type (dep_insn) == TYPE_COMPARE
14099		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14100		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14101		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14102		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14103		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14104		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14105	    return cost + 2;
14106	default:
14107	  break;
14108	}
14109      /* Fall out to return default cost.  */
14110    }
14111
14112  return cost;
14113}
14114
14115/* The function returns a true if INSN is microcoded.
14116   Return false otherwise.  */
14117
14118static bool
14119is_microcoded_insn (rtx insn)
14120{
14121  if (!insn || !INSN_P (insn)
14122      || GET_CODE (PATTERN (insn)) == USE
14123      || GET_CODE (PATTERN (insn)) == CLOBBER)
14124    return false;
14125
14126  if (rs6000_sched_groups)
14127    {
14128      enum attr_type type = get_attr_type (insn);
14129      if (type == TYPE_LOAD_EXT_U
14130	  || type == TYPE_LOAD_EXT_UX
14131	  || type == TYPE_LOAD_UX
14132	  || type == TYPE_STORE_UX
14133	  || type == TYPE_MFCR)
14134        return true;
14135    }
14136
14137  return false;
14138}
14139
14140/* The function returns a nonzero value if INSN can be scheduled only
14141   as the first insn in a dispatch group ("dispatch-slot restricted").
14142   In this case, the returned value indicates how many dispatch slots
14143   the insn occupies (at the beginning of the group).
14144   Return 0 otherwise.  */
14145
14146static int
14147is_dispatch_slot_restricted (rtx insn)
14148{
14149  enum attr_type type;
14150
14151  if (!rs6000_sched_groups)
14152    return 0;
14153
14154  if (!insn
14155      || insn == NULL_RTX
14156      || GET_CODE (insn) == NOTE
14157      || GET_CODE (PATTERN (insn)) == USE
14158      || GET_CODE (PATTERN (insn)) == CLOBBER)
14159    return 0;
14160
14161  type = get_attr_type (insn);
14162
14163  switch (type)
14164    {
14165    case TYPE_MFCR:
14166    case TYPE_MFCRF:
14167    case TYPE_MTCR:
14168    case TYPE_DELAYED_CR:
14169    case TYPE_CR_LOGICAL:
14170    case TYPE_MTJMPR:
14171    case TYPE_MFJMPR:
14172      return 1;
14173    case TYPE_IDIV:
14174    case TYPE_LDIV:
14175      return 2;
14176    default:
14177      if (rs6000_cpu == PROCESSOR_POWER5
14178	  && is_cracked_insn (insn))
14179	return 2;
14180      return 0;
14181    }
14182}
14183
14184/* The function returns true if INSN is cracked into 2 instructions
14185   by the processor (and therefore occupies 2 issue slots).  */
14186
14187static bool
14188is_cracked_insn (rtx insn)
14189{
14190  if (!insn || !INSN_P (insn)
14191      || GET_CODE (PATTERN (insn)) == USE
14192      || GET_CODE (PATTERN (insn)) == CLOBBER)
14193    return false;
14194
14195  if (rs6000_sched_groups)
14196    {
14197      enum attr_type type = get_attr_type (insn);
14198      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14199	       || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14200	       || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14201	       || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14202	       || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14203	       || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14204	       || type == TYPE_IDIV || type == TYPE_LDIV
14205	       || type == TYPE_INSERT_WORD)
14206        return true;
14207    }
14208
14209  return false;
14210}
14211
14212/* The function returns true if INSN can be issued only from
14213   the branch slot.  */
14214
14215static bool
14216is_branch_slot_insn (rtx insn)
14217{
14218  if (!insn || !INSN_P (insn)
14219      || GET_CODE (PATTERN (insn)) == USE
14220      || GET_CODE (PATTERN (insn)) == CLOBBER)
14221    return false;
14222
14223  if (rs6000_sched_groups)
14224    {
14225      enum attr_type type = get_attr_type (insn);
14226      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14227	return true;
14228      return false;
14229    }
14230
14231  return false;
14232}
14233
14234/* A C statement (sans semicolon) to update the integer scheduling
14235   priority INSN_PRIORITY (INSN). Increase the priority to execute the
14236   INSN earlier, reduce the priority to execute INSN later.  Do not
14237   define this macro if you do not need to adjust the scheduling
14238   priorities of insns.  */
14239
14240static int
14241rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14242{
14243  /* On machines (like the 750) which have asymmetric integer units,
14244     where one integer unit can do multiply and divides and the other
14245     can't, reduce the priority of multiply/divide so it is scheduled
14246     before other integer operations.  */
14247
14248#if 0
14249  if (! INSN_P (insn))
14250    return priority;
14251
14252  if (GET_CODE (PATTERN (insn)) == USE)
14253    return priority;
14254
14255  switch (rs6000_cpu_attr) {
14256  case CPU_PPC750:
14257    switch (get_attr_type (insn))
14258      {
14259      default:
14260	break;
14261
14262      case TYPE_IMUL:
14263      case TYPE_IDIV:
14264	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14265		 priority, priority);
14266	if (priority >= 0 && priority < 0x01000000)
14267	  priority >>= 3;
14268	break;
14269      }
14270  }
14271#endif
14272
14273  if (is_dispatch_slot_restricted (insn)
14274      && reload_completed
14275      && current_sched_info->sched_max_insns_priority
14276      && rs6000_sched_restricted_insns_priority)
14277    {
14278
14279      /* Prioritize insns that can be dispatched only in the first dispatch slot.  */
14280      if (rs6000_sched_restricted_insns_priority == 1)
14281	/* Attach highest priority to insn. This means that in
14282	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14283	   precede 'priority' (critical path) considerations.  */
14284	return current_sched_info->sched_max_insns_priority;
14285      else if (rs6000_sched_restricted_insns_priority == 2)
14286	/* Increase priority of insn by a minimal amount. This means that in
14287	   haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14288	   precede dispatch-slot restriction considerations.  */
14289	return (priority + 1);
14290    }
14291
14292  return priority;
14293}
14294
14295/* Return how many instructions the machine can issue per cycle.  */
14296
14297static int
14298rs6000_issue_rate (void)
14299{
14300  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
14301  if (!reload_completed)
14302    return 1;
14303
14304  switch (rs6000_cpu_attr) {
14305  case CPU_RIOS1:  /* ? */
14306  case CPU_RS64A:
14307  case CPU_PPC601: /* ? */
14308  case CPU_PPC7450:
14309    return 3;
14310  case CPU_PPC440:
14311  case CPU_PPC603:
14312  case CPU_PPC750:
14313  case CPU_PPC7400:
14314  case CPU_PPC8540:
14315    return 2;
14316  case CPU_RIOS2:
14317  case CPU_PPC604:
14318  case CPU_PPC604E:
14319  case CPU_PPC620:
14320  case CPU_PPC630:
14321    return 4;
14322  case CPU_POWER4:
14323  case CPU_POWER5:
14324    return 5;
14325  default:
14326    return 1;
14327  }
14328}
14329
14330/* Return how many instructions to look ahead for better insn
14331   scheduling.  */
14332
14333static int
14334rs6000_use_sched_lookahead (void)
14335{
14336  if (rs6000_cpu_attr == CPU_PPC8540)
14337    return 4;
14338  return 0;
14339}
14340
/* Determine if PAT refers to memory.  */
14342
14343static bool
14344is_mem_ref (rtx pat)
14345{
14346  const char * fmt;
14347  int i, j;
14348  bool ret = false;
14349
14350  if (GET_CODE (pat) == MEM)
14351    return true;
14352
14353  /* Recursively process the pattern.  */
14354  fmt = GET_RTX_FORMAT (GET_CODE (pat));
14355
14356  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14357    {
14358      if (fmt[i] == 'e')
14359	ret |= is_mem_ref (XEXP (pat, i));
14360      else if (fmt[i] == 'E')
14361	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14362	  ret |= is_mem_ref (XVECEXP (pat, i, j));
14363    }
14364
14365  return ret;
14366}
14367
14368/* Determine if PAT is a PATTERN of a load insn.  */
14369
14370static bool
14371is_load_insn1 (rtx pat)
14372{
14373  if (!pat || pat == NULL_RTX)
14374    return false;
14375
14376  if (GET_CODE (pat) == SET)
14377    return is_mem_ref (SET_SRC (pat));
14378
14379  if (GET_CODE (pat) == PARALLEL)
14380    {
14381      int i;
14382
14383      for (i = 0; i < XVECLEN (pat, 0); i++)
14384	if (is_load_insn1 (XVECEXP (pat, 0, i)))
14385	  return true;
14386    }
14387
14388  return false;
14389}
14390
14391/* Determine if INSN loads from memory.  */
14392
14393static bool
14394is_load_insn (rtx insn)
14395{
14396  if (!insn || !INSN_P (insn))
14397    return false;
14398
14399  if (GET_CODE (insn) == CALL_INSN)
14400    return false;
14401
14402  return is_load_insn1 (PATTERN (insn));
14403}
14404
14405/* Determine if PAT is a PATTERN of a store insn.  */
14406
14407static bool
14408is_store_insn1 (rtx pat)
14409{
14410  if (!pat || pat == NULL_RTX)
14411    return false;
14412
14413  if (GET_CODE (pat) == SET)
14414    return is_mem_ref (SET_DEST (pat));
14415
14416  if (GET_CODE (pat) == PARALLEL)
14417    {
14418      int i;
14419
14420      for (i = 0; i < XVECLEN (pat, 0); i++)
14421	if (is_store_insn1 (XVECEXP (pat, 0, i)))
14422	  return true;
14423    }
14424
14425  return false;
14426}
14427
14428/* Determine if INSN stores to memory.  */
14429
14430static bool
14431is_store_insn (rtx insn)
14432{
14433  if (!insn || !INSN_P (insn))
14434    return false;
14435
14436  return is_store_insn1 (PATTERN (insn));
14437}
14438
14439/* Returns whether the dependence between INSN and NEXT is considered
14440   costly by the given target.  */
14441
static bool
rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
{
  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  /* REG_NOTE_KIND of 0 means a true (flow) dependence; anti and output
     dependences carry other REG_DEP_* kinds.  */
  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && (!link || (int) REG_NOTE_KIND (link) == 0))
     /* Prevent load after store in the same group if it is a true dependence.  */
     return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  DISTANCE (issue slots
     already separating the insns) is credited against the latency.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
14478
14479/* Return the next insn after INSN that is found before TAIL is reached,
14480   skipping any "non-active" insns - insns that will not actually occupy
14481   an issue slot.  Return NULL_RTX if such an insn is not found.  */
14482
14483static rtx
14484get_next_active_insn (rtx insn, rtx tail)
14485{
14486  rtx next_insn;
14487
14488  if (!insn || insn == tail)
14489    return NULL_RTX;
14490
14491  next_insn = NEXT_INSN (insn);
14492
14493  while (next_insn
14494  	 && next_insn != tail
14495	 && (GET_CODE(next_insn) == NOTE
14496	     || GET_CODE (PATTERN (next_insn)) == USE
14497	     || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14498    {
14499      next_insn = NEXT_INSN (next_insn);
14500    }
14501
14502  if (!next_insn || next_insn == tail)
14503    return NULL_RTX;
14504
14505  return next_insn;
14506}
14507
14508/* Return whether the presence of INSN causes a dispatch group termination
14509   of group WHICH_GROUP.
14510
14511   If WHICH_GROUP == current_group, this function will return true if INSN
14512   causes the termination of the current group (i.e, the dispatch group to
14513   which INSN belongs). This means that INSN will be the last insn in the
14514   group it belongs to.
14515
14516   If WHICH_GROUP == previous_group, this function will return true if INSN
14517   causes the termination of the previous group (i.e, the dispatch group that
14518   precedes the group to which INSN belongs).  This means that INSN will be
14519   the first insn in the group it belongs to).  */
14520
14521static bool
14522insn_terminates_group_p (rtx insn, enum group_termination which_group)
14523{
14524  enum attr_type type;
14525
14526  if (! insn)
14527    return false;
14528
14529  type = get_attr_type (insn);
14530
14531  if (is_microcoded_insn (insn))
14532    return true;
14533
14534  if (which_group == current_group)
14535    {
14536      if (is_branch_slot_insn (insn))
14537        return true;
14538      return false;
14539    }
14540  else if (which_group == previous_group)
14541    {
14542      if (is_dispatch_slot_restricted (insn))
14543        return true;
14544      return false;
14545    }
14546
14547  return false;
14548}
14549
14550/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14551   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */
14552
static bool
is_costly_group (rtx *group_insns, rtx next_insn)
{
  int i;
  rtx link;
  int cost;
  int issue_rate = rs6000_issue_rate ();

  /* GROUP_INSNS holds one slot per issue position; empty slots are 0.  */
  for (i = 0; i < issue_rate; i++)
    {
      rtx insn = group_insns[i];
      if (!insn)
        continue;
      /* Walk the forward-dependence list of this group member looking
	 for a dependence whose consumer is NEXT_INSN.  */
      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
        {
          rtx next = XEXP (link, 0);
          if (next == next_insn)
            {
              cost = insn_cost (insn, link, next_insn);
              /* Distance 0: the insns would share a dispatch group.  */
              if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
                return true;
            }
        }
    }

  return false;
}
14580
14581/* Utility of the function redefine_groups.
14582   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14583   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
14584   to keep it "far" (in a separate group) from GROUP_INSNS, following
14585   one of the following schemes, depending on the value of the flag
14586   -minsert_sched_nops = X:
14587   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14588       in order to force NEXT_INSN into a separate group.
14589   (2) X < sched_finish_regroup_exact: insert exactly X nops.
14590   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14591   insertion (has a group just ended, how many vacant issue slots remain in the
14592   last group, and how many dispatch groups were encountered so far).  */
14593
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
		 bool *group_end, int can_issue_more, int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact mean "insert no nops".  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  /* Only act when NEXT_INSN has a costly dependence on the group.  */
  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
			*group_count ,can_issue_more);

  /* Scheme (1): pad out the remainder of the current group exactly.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
        can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch will be
	 forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
        can_issue_more--;

      while (can_issue_more > 0)
        {
          nop = gen_nop();
          emit_insn_before (nop, next_insn);
          can_issue_more--;
        }

      /* The group is now full; NEXT_INSN will start a fresh one.  */
      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert a fixed number of nops, tracking group
     boundaries as each nop consumes an issue slot.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
         issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
        {
          can_issue_more = issue_rate - 1;
          (*group_count)++;
          end = true;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      while (n_nops > 0)
        {
          nop = gen_nop ();
          emit_insn_before (nop, next_insn);
          if (can_issue_more == issue_rate - 1) /* new group begins */
            end = false;
          can_issue_more--;
          if (can_issue_more == 0)
            {
              can_issue_more = issue_rate - 1;
              (*group_count)++;
              end = true;
              for (i = 0; i < issue_rate; i++)
                {
                  group_insns[i] = 0;
                }
            }
          n_nops--;
        }

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      *group_end = /* Is next_insn going to start a new group?  */
	  (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	      insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
        (*group_count)--;

      if (sched_verbose > 6)
        fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
			*group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
14702
14703/* This function tries to synch the dispatch groups that the compiler "sees"
14704   with the dispatch groups that the processor dispatcher is expected to
14705   form in practice.  It tries to achieve this synchronization by forcing the
14706   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the processor).
14708
14709   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14710   examines the (estimated) dispatch groups that will be formed by the processor
14711   dispatcher.  It marks these group boundaries to reflect the estimated
14712   processor grouping, overriding the grouping that the scheduler had marked.
14713   Depending on the value of the flag '-minsert-sched-nops' this function can
14714   force certain insns into separate groups or force a certain distance between
14715   them by inserting nops, for example, if there exists a "costly dependence"
14716   between the insns.
14717
14718   The function estimates the group boundaries that the processor will form as
   follows:  It keeps track of how many vacant issue slots are available after
14720   each insn.  A subsequent insn will start a new group if one of the following
14721   4 cases applies:
14722   - no more vacant issue slots remain in the current dispatch group.
14723   - only the last issue slot, which is the branch slot, is vacant, but the next
14724     insn is not a branch.
14725   - only the last 2 or less issue slots, including the branch slot, are vacant,
14726     which means that a cracked insn (which occupies two issue slots) can't be
14727     issued in this group.
14728   - less than 'issue_rate' slots are vacant, and the next insn always needs to
14729     start a new group.  */
14730
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in the issue slot it occupies within the group.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
        rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
        can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
        return group_count + 1;

      group_end = /* Is next_insn going to start a new group?  */
        (can_issue_more == 0
         || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
         || (can_issue_more <= 2 && is_cracked_insn (next_insn))
         || (can_issue_more < issue_rate &&
             insn_terminates_group_p (next_insn, previous_group)));

      /* Possibly insert nops to keep NEXT_INSN away from a costly
	 dependence on the current group; updates group_end/group_count.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
			next_insn, &group_end, can_issue_more, &group_count);

      if (group_end)
        {
          group_count++;
          can_issue_more = 0;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      /* TImode on an insn marks it as the start of a dispatch group;
	 override the scheduler's marking with our estimate.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
        PUT_MODE(next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
        PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
   } /* while */

  return group_count;
}
14799
14800/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14801   dispatch group boundaries that the scheduler had marked.  Pad with nops
14802   any dispatch groups which have vacant issue slots, in order to force the
14803   scheduler's grouping on the processor dispatcher.  The function
14804   returns the number of dispatch groups found.  */
14805
static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
      	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* TImode on NEXT_INSN is the scheduler's mark that it starts a
	 new dispatch group.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
        break;

      if (group_end)
        {
          /* If the scheduler had marked group termination at this location
             (between insn and next_insn), and neither insn nor next_insn will
             force group termination, pad the group with nops to force group
             termination.  */
          if (can_issue_more
              && (rs6000_sched_insert_nops == sched_finish_pad_groups)
              && !insn_terminates_group_p (insn, current_group)
              && !insn_terminates_group_p (next_insn, previous_group))
            {
              /* The branch slot cannot hold a nop, so one fewer nop is
		 needed unless NEXT_INSN is itself a branch.  */
              if (!is_branch_slot_insn(next_insn))
                can_issue_more--;

              while (can_issue_more)
                {
                  nop = gen_nop ();
                  emit_insn_before (nop, next_insn);
                  can_issue_more--;
                }
            }

          can_issue_more = issue_rate;
          group_count++;
        }

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
14865
/* The following function is called at the end of scheduling BB.
   After reload, it inserts nops to preserve insn group boundaries.  */
14868
14869static void
14870rs6000_sched_finish (FILE *dump, int sched_verbose)
14871{
14872  int n_groups;
14873
14874  if (sched_verbose)
14875    fprintf (dump, "=== Finishing schedule.\n");
14876
14877  if (reload_completed && rs6000_sched_groups)
14878    {
14879      if (rs6000_sched_insert_nops == sched_finish_none)
14880        return;
14881
14882      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14883        n_groups = pad_groups (dump, sched_verbose,
14884				current_sched_info->prev_head,
14885  			   	current_sched_info->next_tail);
14886      else
14887        n_groups = redefine_groups (dump, sched_verbose,
14888				current_sched_info->prev_head,
14889  				current_sched_info->next_tail);
14890
14891      if (sched_verbose >= 6)
14892	{
14893    	  fprintf (dump, "ngroups = %d\n", n_groups);
14894	  print_rtl (dump, current_sched_info->prev_head);
14895	  fprintf (dump, "Done finish_sched\n");
14896	}
14897    }
14898}
14899
14900/* Length in units of the trampoline for entering a nested function.  */
14901
14902int
14903rs6000_trampoline_size (void)
14904{
14905  int ret = 0;
14906
14907  switch (DEFAULT_ABI)
14908    {
14909    default:
14910      abort ();
14911
14912    case ABI_AIX:
14913      ret = (TARGET_32BIT) ? 12 : 24;
14914      break;
14915
14916    case ABI_DARWIN:
14917    case ABI_V4:
14918      ret = (TARGET_32BIT) ? 40 : 48;
14919      break;
14920    }
14921
14922  return ret;
14923}
14924
14925/* Emit RTL insns to initialize the variable parts of a trampoline.
14926   FNADDR is an RTX for the address of the function's pure code.
14927   CXT is an RTX for the static chain value for the function.  */
14928
void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  /* Word size of the descriptor entries: 4 bytes in 32-bit, 8 in 64-bit.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the target's code address and TOC pointer out of its
	   descriptor, then store code address, TOC pointer, and static
	   chain into the trampoline's descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
14973
14974
14975/* Table of valid machine attributes.  */
14976
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }  /* Sentinel entry.  */
};
14985
14986/* Handle the "altivec" attribute.  The attribute may have
14987   arguments as follows:
14988
14989       __attribute__((altivec(vector__)))
14990       __attribute__((altivec(pixel__)))       (always followed by 'unsigned short')
14991       __attribute__((altivec(bool__)))        (always followed by 'unsigned')
14992
14993  and may appear more than once (e.g., 'vector bool char') in a
14994  given declaration.  */
14995
static tree
rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the attribute argument selects the AltiVec
     variant: 'v' (vector__), 'b' (bool__), or 'p' (pixel__).  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
       && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointer/function/method/array layers to reach the element
     type the attribute actually applies to.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  if (rs6000_warn_altivec_long
      && (type == long_unsigned_type_node || type == long_integer_type_node))
    warning ("use of 'long' in AltiVec types is deprecated; use 'int'");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TREE_UNSIGNED (type);
      switch (mode)
	{
	  case SImode:
	    result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	    break;
	  case HImode:
	    result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	    break;
	  case QImode:
	    result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	    break;
	  case SFmode: result = V4SF_type_node; break;
	    /* If the user says 'vector int bool', we may be handed the 'bool'
	       attribute _before_ the 'vector' attribute, and so select the proper
	       type in the 'b' case below.  */
	  case V4SImode: case V8HImode: case V16QImode: result = type;
	  /* Fall through to default (no break above is intentional).  */
	  default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	  case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	  case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	  case QImode: case V16QImode: result = bool_V16QI_type_node;
	  /* Fall through (intentional).  */
	  default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	  case V8HImode: result = pixel_V8HI_type_node;
	  /* Fall through (intentional).  */
	  default: break;
	}
    /* Fall through to outer default (intentional).  */
    default: break;
    }

  /* Propagate const qualification from the original type.  */
  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (!result)
    warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
  else
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
15075
15076/* AltiVec defines four built-in scalar types that serve as vector
15077   elements; we must teach the compiler how to mangle them.  */
15078
15079static const char *
15080rs6000_mangle_fundamental_type (tree type)
15081{
15082  if (type == bool_char_type_node) return "U6__boolc";
15083  if (type == bool_short_type_node) return "U6__bools";
15084  if (type == pixel_type_node) return "u7__pixel";
15085  if (type == bool_int_type_node) return "U6__booli";
15086
15087  /* For all other types, use normal C++ mangling.  */
15088  return NULL;
15089}
15090
15091/* Handle a "longcall" or "shortcall" attribute; arguments as in
15092   struct attribute_spec.handler.  */
15093
15094static tree
15095rs6000_handle_longcall_attribute (tree *node, tree name,
15096				  tree args ATTRIBUTE_UNUSED,
15097				  int flags ATTRIBUTE_UNUSED,
15098				  bool *no_add_attrs)
15099{
15100  if (TREE_CODE (*node) != FUNCTION_TYPE
15101      && TREE_CODE (*node) != FIELD_DECL
15102      && TREE_CODE (*node) != TYPE_DECL)
15103    {
15104      warning ("`%s' attribute only applies to functions",
15105	       IDENTIFIER_POINTER (name));
15106      *no_add_attrs = true;
15107    }
15108
15109  return NULL_TREE;
15110}
15111
15112/* Set longcall attributes on all functions declared when
15113   rs6000_default_long_calls is true.  */
15114static void
15115rs6000_set_default_type_attributes (tree type)
15116{
15117  if (rs6000_default_long_calls
15118      && (TREE_CODE (type) == FUNCTION_TYPE
15119	  || TREE_CODE (type) == METHOD_TYPE))
15120    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15121					NULL_TREE,
15122					TYPE_ATTRIBUTES (type));
15123}
15124
15125/* Return a reference suitable for calling a function with the
15126   longcall attribute.  */
15127
15128struct rtx_def *
15129rs6000_longcall_ref (rtx call_ref)
15130{
15131  const char *call_name;
15132  tree node;
15133
15134  if (GET_CODE (call_ref) != SYMBOL_REF)
15135    return call_ref;
15136
15137  /* System V adds '.' to the internal name, so skip them.  */
15138  call_name = XSTR (call_ref, 0);
15139  if (*call_name == '.')
15140    {
15141      while (*call_name == '.')
15142	call_name++;
15143
15144      node = get_identifier (call_name);
15145      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15146    }
15147
15148  return force_reg (Pmode, call_ref);
15149}
15150
15151#ifdef USING_ELFOS_H
15152
15153/* A C statement or statements to switch to the appropriate section
15154   for output of RTX in mode MODE.  You can assume that RTX is some
15155   kind of constant in RTL.  The argument MODE is redundant except in
15156   the case of a `const_int' rtx.  Select the section by calling
15157   `text_section' or one of the alternatives for other sections.
15158
15159   Do not define this macro if you put all constants in the read-only
15160   data section.  */
15161
15162static void
15163rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15164			       unsigned HOST_WIDE_INT align)
15165{
15166  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15167    toc_section ();
15168  else
15169    default_elf_select_rtx_section (mode, x, align);
15170}
15171
15172/* A C statement or statements to switch to the appropriate
15173   section for output of DECL.  DECL is either a `VAR_DECL' node
15174   or a constant of some sort.  RELOC indicates whether forming
15175   the initial value of DECL requires link-time relocations.  */
15176
15177static void
15178rs6000_elf_select_section (tree decl, int reloc,
15179			   unsigned HOST_WIDE_INT align)
15180{
15181  /* Pretend that we're always building for a shared library when
15182     ABI_AIX, because otherwise we end up with dynamic relocations
15183     in read-only sections.  This happens for function pointers,
15184     references to vtables in typeinfo, and probably other cases.  */
15185  default_elf_select_section_1 (decl, reloc, align,
15186				flag_pic || DEFAULT_ABI == ABI_AIX);
15187}
15188
15189/* A C statement to build up a unique section name, expressed as a
15190   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15191   RELOC indicates whether the initial value of EXP requires
15192   link-time relocations.  If you do not define this macro, GCC will use
15193   the symbol name prefixed by `.' as the section name.  Note - this
15194   macro can now be called for uninitialized data items as well as
15195   initialized data and functions.  */
15196
15197static void
15198rs6000_elf_unique_section (tree decl, int reloc)
15199{
15200  /* As above, pretend that we're always building for a shared library
15201     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
15202  default_unique_section_1 (decl, reloc,
15203			    flag_pic || DEFAULT_ABI == ABI_AIX);
15204}
15205
15206/* For a SYMBOL_REF, set generic flags and then perform some
15207   target-specific processing.
15208
15209   When the AIX ABI is requested on a non-AIX system, replace the
15210   function name with the real name (with a leading .) rather than the
15211   function descriptor name.  This saves a lot of overriding code to
15212   read the prefixes.  */
15213
15214static void
15215rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15216{
15217  default_encode_section_info (decl, rtl, first);
15218
15219  if (first
15220      && TREE_CODE (decl) == FUNCTION_DECL
15221      && !TARGET_AIX
15222      && DEFAULT_ABI == ABI_AIX)
15223    {
15224      rtx sym_ref = XEXP (rtl, 0);
15225      size_t len = strlen (XSTR (sym_ref, 0));
15226      char *str = alloca (len + 2);
15227      str[0] = '.';
15228      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15229      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15230    }
15231}
15232
15233static bool
15234rs6000_elf_in_small_data_p (tree decl)
15235{
15236  if (rs6000_sdata == SDATA_NONE)
15237    return false;
15238
15239  /* We want to merge strings, so we never consider them small data.  */
15240  if (TREE_CODE (decl) == STRING_CST)
15241    return false;
15242
15243  /* Functions are never in the small data area.  */
15244  if (TREE_CODE (decl) == FUNCTION_DECL)
15245    return false;
15246
15247  /* Thread-local vars can't go in the small data area.  */
15248  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
15249    return false;
15250
15251  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15252    {
15253      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15254      if (strcmp (section, ".sdata") == 0
15255	  || strcmp (section, ".sdata2") == 0
15256	  || strcmp (section, ".sbss") == 0
15257	  || strcmp (section, ".sbss2") == 0
15258	  || strcmp (section, ".PPC.EMB.sdata0") == 0
15259	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
15260	return true;
15261    }
15262  else
15263    {
15264      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15265
15266      if (size > 0
15267	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
15268	  /* If it's not public, and we're not going to reference it there,
15269	     there's no need to put it in the small data section.  */
15270	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15271	return true;
15272    }
15273
15274  return false;
15275}
15276
15277#endif /* USING_ELFOS_H */
15278
15279
15280/* Return a REG that occurs in ADDR with coefficient 1.
15281   ADDR can be effectively incremented by incrementing REG.
15282
15283   r0 is special and we must not select it as an address
15284   register by this routine since our caller will try to
15285   increment the returned register via an "la" instruction.  */
15286
15287struct rtx_def *
15288find_addr_reg (rtx addr)
15289{
15290  while (GET_CODE (addr) == PLUS)
15291    {
15292      if (GET_CODE (XEXP (addr, 0)) == REG
15293	  && REGNO (XEXP (addr, 0)) != 0)
15294	addr = XEXP (addr, 0);
15295      else if (GET_CODE (XEXP (addr, 1)) == REG
15296	       && REGNO (XEXP (addr, 1)) != 0)
15297	addr = XEXP (addr, 1);
15298      else if (CONSTANT_P (XEXP (addr, 0)))
15299	addr = XEXP (addr, 1);
15300      else if (CONSTANT_P (XEXP (addr, 1)))
15301	addr = XEXP (addr, 0);
15302      else
15303	abort ();
15304    }
15305  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15306    return addr;
15307  abort ();
15308}
15309
/* Report an invalid address OP as a fatal error attributed to the
   offending insn.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
15315
15316#if TARGET_MACHO
15317
15318#if 0
15319/* Returns 1 if OP is either a symbol reference or a sum of a symbol
15320   reference and a constant.  */
15321
/* NOTE(review): this function is compiled out (#if 0).  The CONST
   case relies on '&&' binding tighter than '||': the intent is
   "SYMBOL_REF, or (SYMBOL_REF|LABEL_REF) plus CONST_INT".  */
int
symbolic_operand (rtx op)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      /* Look through the CONST wrapper at the PLUS (or symbol) inside.  */
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
15340#endif
15341
15342#if TARGET_MACHO
15343
15344static tree branch_island_list = 0;
15345
15346/* Remember to generate a branch island for far calls to the given
15347   function.  */
15348
static void
add_compiler_branch_island (tree label_name, tree function_name, int line_number)
{
  /* Each list node is (FUNCTION_NAME . LABEL_NAME); the source line
     is stashed in TREE_TYPE as an integer constant (see the
     BRANCH_ISLAND_* accessor macros below).  */
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
  /* Push onto the front of the global list.  */
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
15357
15358#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
15359#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
15360#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
15361		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15362
15363/* Generate far-jump branch islands for everything on the
15364   branch_island_list.  Invoked immediately after the last instruction
15365   of the epilogue has been emitted; the branch-islands must be
15366   appended to, and contiguous with, the function body.  Mach-O stubs
15367   are generated in machopic_output_stub().  */
15368
static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	darwin_strip_name_encoding (
	  IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      /* The island starts with its label on a fresh line.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      /* Attribute the island code to the recorded source line.  */
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER(branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: get the island's own address into r11 via
	     bcl/mflr (saving LR in r0 and restoring it afterwards),
	     add the target's offset, and jump through CTR.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the target's absolute address into
	     r12 and jump through CTR.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  /* All islands emitted; reset the list for the next function.  */
  branch_island_list = 0;
}
15442
/* NO_PREVIOUS_DEF checks whether the linked list of branch islands
   already contains an entry for the given function name.  */
15445
15446static int
15447no_previous_def (tree function_name)
15448{
15449  tree branch_island;
15450  for (branch_island = branch_island_list;
15451       branch_island;
15452       branch_island = TREE_CHAIN (branch_island))
15453    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15454      return 0;
15455  return 1;
15456}
15457
15458/* GET_PREV_LABEL gets the label name from the previous definition of
15459   the function.  */
15460
15461static tree
15462get_prev_label (tree function_name)
15463{
15464  tree branch_island;
15465  for (branch_island = branch_island_list;
15466       branch_island;
15467       branch_island = TREE_CHAIN (branch_island))
15468    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15469      return BRANCH_ISLAND_LABEL_NAME (branch_island);
15470  return 0;
15471}
15472
15473/* INSN is either a function call or a millicode call.  It may have an
15474   unconditional jump in its delay slot.
15475
15476   CALL_DEST is the routine we are calling.  */
15477
char *
output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
{
  /* NOTE: returns a pointer to a static buffer, overwritten on each
     call.  */
  static char buf[256];
  if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  /* First long call to this function: invent an island label
	     and record it on branch_island_list.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Drop a leading '*' (no-prefix marker) if present.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the nearest NOTE to pick up a source line
	     number for the island; stays 0 if none is found.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
15517
15518#endif /* TARGET_MACHO */
15519
15520/* Generate PIC and indirect symbol stubs.  */
15521
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* counter for the per-stub local label */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);


  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  /* -fPIC stubs and non-PIC stubs live in different sections.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: find our own address with bcl/mflr, then load the
	 lazy pointer pc-relatively and jump through CTR.  LR is saved
	 in r0 and restored before the jump.  */
      label++;
      local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
   {
     /* Non-PIC stub: load the lazy pointer through its absolute
	address.  */
     fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
     fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
     fprintf (file, "\tmtctr r12\n");
     fprintf (file, "\tbctr\n");
   }

  /* Emit the lazy pointer itself, initially resolving to the dynamic
     linker's binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
15580
15581/* Legitimize PIC addresses.  If the address is already
15582   position-independent, we return ORIG.  Newly generated
15583   position-independent addresses go into a reg.  This is REG if non
15584   zero, otherwise we allocate register(s) as necessary.  */
15585
15586#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15587
rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  /* Only allocate a fresh pseudo when we are still allowed to.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already pic_offset_table-relative: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Use a different reg for the intermediate value, as
	     it will be marked UNCHANGING.  */
	  rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);

	  /* Legitimize both halves of the PLUS recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg_temp);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload we cannot make new pseudos; fall
		 back to a constant-pool reference for the whole sum.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
15637
15638/* This is just a placeholder to make linking work without having to
15639   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
15640   ever needed for Darwin (not too likely!) this would have to get a
15641   real definition.  */
15642
void
toc_section (void)
{
  /* Intentionally empty; see the comment above this function.  */
}
15647
15648#endif /* TARGET_MACHO */
15649
15650#if TARGET_ELF
15651static unsigned int
15652rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15653{
15654  return default_section_type_flags_1 (decl, name, reloc,
15655				       flag_pic || DEFAULT_ABI == ABI_AIX);
15656}
15657
15658/* Record an element in the table of global constructors.  SYMBOL is
15659   a SYMBOL_REF of the function to be called; PRIORITY is a number
15660   between 0 and MAX_INIT_PRIORITY.
15661
15662   This differs from default_named_section_asm_out_constructor in
15663   that we have special handling for -mrelocatable.  */
15664
15665static void
15666rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15667{
15668  const char *section = ".ctors";
15669  char buf[16];
15670
15671  if (priority != DEFAULT_INIT_PRIORITY)
15672    {
15673      sprintf (buf, ".ctors.%.5u",
15674               /* Invert the numbering so the linker puts us in the proper
15675                  order; constructors are run from right to left, and the
15676                  linker sorts in increasing order.  */
15677               MAX_INIT_PRIORITY - priority);
15678      section = buf;
15679    }
15680
15681  named_section_flags (section, SECTION_WRITE);
15682  assemble_align (POINTER_SIZE);
15683
15684  if (TARGET_RELOCATABLE)
15685    {
15686      fputs ("\t.long (", asm_out_file);
15687      output_addr_const (asm_out_file, symbol);
15688      fputs (")@fixup\n", asm_out_file);
15689    }
15690  else
15691    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15692}
15693
15694static void
15695rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15696{
15697  const char *section = ".dtors";
15698  char buf[16];
15699
15700  if (priority != DEFAULT_INIT_PRIORITY)
15701    {
15702      sprintf (buf, ".dtors.%.5u",
15703               /* Invert the numbering so the linker puts us in the proper
15704                  order; constructors are run from right to left, and the
15705                  linker sorts in increasing order.  */
15706               MAX_INIT_PRIORITY - priority);
15707      section = buf;
15708    }
15709
15710  named_section_flags (section, SECTION_WRITE);
15711  assemble_align (POINTER_SIZE);
15712
15713  if (TARGET_RELOCATABLE)
15714    {
15715      fputs ("\t.long (", asm_out_file);
15716      output_addr_const (asm_out_file, symbol);
15717      fputs (")@fixup\n", asm_out_file);
15718    }
15719  else
15720    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15721}
15722
15723void
15724rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15725{
15726  if (TARGET_64BIT)
15727    {
15728      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15729      ASM_OUTPUT_LABEL (file, name);
15730      fputs (DOUBLE_INT_ASM_OP, file);
15731      putc ('.', file);
15732      assemble_name (file, name);
15733      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15734      assemble_name (file, name);
15735      fputs (",24\n\t.type\t.", file);
15736      assemble_name (file, name);
15737      fputs (",@function\n", file);
15738      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15739	{
15740	  fputs ("\t.globl\t.", file);
15741	  assemble_name (file, name);
15742	  putc ('\n', file);
15743	}
15744      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15745      putc ('.', file);
15746      ASM_OUTPUT_LABEL (file, name);
15747      return;
15748    }
15749
15750  if (TARGET_RELOCATABLE
15751      && (get_pool_size () != 0 || current_function_profile)
15752      && uses_TOC ())
15753    {
15754      char buf[256];
15755
15756      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15757
15758      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15759      fprintf (file, "\t.long ");
15760      assemble_name (file, buf);
15761      putc ('-', file);
15762      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15763      assemble_name (file, buf);
15764      putc ('\n', file);
15765    }
15766
15767  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15768  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15769
15770  if (DEFAULT_ABI == ABI_AIX)
15771    {
15772      const char *desc_name, *orig_name;
15773
15774      orig_name = (*targetm.strip_name_encoding) (name);
15775      desc_name = orig_name;
15776      while (*desc_name == '.')
15777	desc_name++;
15778
15779      if (TREE_PUBLIC (decl))
15780	fprintf (file, "\t.globl %s\n", desc_name);
15781
15782      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15783      fprintf (file, "%s:\n", desc_name);
15784      fprintf (file, "\t.long %s\n", orig_name);
15785      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15786      if (DEFAULT_ABI == ABI_AIX)
15787	fputs ("\t.long 0\n", file);
15788      fprintf (file, "\t.previous\n");
15789    }
15790  ASM_OUTPUT_LABEL (file, name);
15791}
15792
15793static void
15794rs6000_elf_end_indicate_exec_stack (void)
15795{
15796  if (TARGET_32BIT)
15797    file_end_indicate_exec_stack ();
15798}
15799#endif
15800
15801#if TARGET_XCOFF
/* Emit the directive making NAME global, writing the name through
   RS6000_OUTPUT_BASENAME.  */
static void
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
15809
15810static void
15811rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15812{
15813  int smclass;
15814  static const char * const suffix[3] = { "PR", "RO", "RW" };
15815
15816  if (flags & SECTION_CODE)
15817    smclass = 0;
15818  else if (flags & SECTION_WRITE)
15819    smclass = 2;
15820  else
15821    smclass = 1;
15822
15823  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15824	   (flags & SECTION_CODE) ? "." : "",
15825	   name, suffix[smclass], flags & SECTION_ENTSIZE);
15826}
15827
15828static void
15829rs6000_xcoff_select_section (tree decl, int reloc,
15830			    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15831{
15832  if (decl_readonly_section_1 (decl, reloc, 1))
15833    {
15834      if (TREE_PUBLIC (decl))
15835        read_only_data_section ();
15836      else
15837        read_only_private_data_section ();
15838    }
15839  else
15840    {
15841      if (TREE_PUBLIC (decl))
15842        data_section ();
15843      else
15844        private_data_section ();
15845    }
15846}
15847
15848static void
15849rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15850{
15851  const char *name;
15852
15853  /* Use select_section for private and uninitialized data.  */
15854  if (!TREE_PUBLIC (decl)
15855      || DECL_COMMON (decl)
15856      || DECL_INITIAL (decl) == NULL_TREE
15857      || DECL_INITIAL (decl) == error_mark_node
15858      || (flag_zero_initialized_in_bss
15859	  && initializer_zerop (DECL_INITIAL (decl))))
15860    return;
15861
15862  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15863  name = (*targetm.strip_name_encoding) (name);
15864  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15865}
15866
15867/* Select section for constant in constant pool.
15868
15869   On RS/6000, all constants are in the private read-only data area.
15870   However, if this is being placed in the TOC it must be output as a
15871   toc entry.  */
15872
15873static void
15874rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15875				unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15876{
15877  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15878    toc_section ();
15879  else
15880    read_only_private_data_section ();
15881}
15882
15883/* Remove any trailing [DS] or the like from the symbol name.  */
15884
/* Remove any trailing "[XX]" storage-class suffix (e.g. [DS]) from
   NAME, as well as a leading '*' marker.  Returns NAME itself when
   nothing needs stripping, otherwise a fresh GC-allocated copy.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* Require at least four characters before stripping: a "[XX]"
     suffix is never shorter, and without the check an empty name
     would be read out of bounds and len - 4 would wrap around.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
15897
15898/* Section attributes.  AIX is always PIC.  */
15899
static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  /* AIX is always PIC, so pass shlib == 1 unconditionally.  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): this arm dereferences DECL unconditionally —
       presumably callers never pass a null DECL for data sections;
       verify against the hook's callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* The alignment (as a log2) is smuggled in the SECTION_ENTSIZE bits.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
15917
15918/* Output at beginning of assembler file.
15919
15920   Initialize the section names for the RS/6000 at this point.
15921
15922   Specify filename, including full path, to assembler.
15923
15924   We want to go into the TOC section so at least one .toc will be emitted.
15925   Also, in order to output proper .bs/.es pairs, we need at least one static
15926   [RW] section emitted.
15927
15928   Finally, declare mcount when profiling to make the assembler happy.  */
15929
static void
rs6000_xcoff_file_start (void)
{
  /* Derive per-file section names from the input file name.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the TOC so at least one .toc is emitted, and (when
     debugging) a static [RW] section for proper .bs/.es pairs; see
     the comment above this function.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  /* Declare mcount when profiling to make the assembler happy.  */
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
15951
15952/* Output at end of assembler file.
15953   On the RS/6000, referencing data should automatically pull in text.  */
15954
15955static void
15956rs6000_xcoff_file_end (void)
15957{
15958  text_section ();
15959  fputs ("_section_.text:\n", asm_out_file);
15960  data_section ();
15961  fputs (TARGET_32BIT
15962	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15963	 asm_out_file);
15964}
15965#endif /* TARGET_XCOFF */
15966
15967#if TARGET_MACHO
15968/* Cross-module name binding.  Darwin does not support overriding
15969   functions at dynamic-link time.  */
15970
static bool
rs6000_binds_local_p (tree decl)
{
  /* Darwin does not support overriding functions at dynamic-link
     time (see the comment above), so pass shlib == 0.  */
  return default_binds_local_p_1 (decl, 0);
}
15976#endif
15977
15978/* Compute a (partial) cost for rtx X.  Return true if the complete
15979   cost has been computed, and false if subexpressions should be
15980   scanned.  In either case, *TOTAL contains the cost result.  */
15981
static bool
rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
		  int *total)
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* An add of a constant outside the signed 16-bit immediate
	 range, with nonzero low bits, takes an extra instruction.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* A logical op with a constant that has bits set in both
	 halves needs two instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      /* Per-processor multiply costs; several parts are cheaper with
	 a small (-256..255) constant operand, or dearer in DImode.  */
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_PPC440:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (3)
		    : COSTS_N_INSNS (2));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by an exact power of two is cheap.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-processor divide costs; DImode divides cost more where
	 the part distinguishes mode.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC440:
	  *total = COSTS_N_INSNS (34);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)).  */
      *total = 5;
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
16202
16203/* A C expression returning the cost of moving data from a register of class
16204   CLASS1 to one of CLASS2.  */
16205
16206int
16207rs6000_register_move_cost (enum machine_mode mode,
16208			   enum reg_class from, enum reg_class to)
16209{
16210  /*  Moves from/to GENERAL_REGS.  */
16211  if (reg_classes_intersect_p (to, GENERAL_REGS)
16212      || reg_classes_intersect_p (from, GENERAL_REGS))
16213    {
16214      if (! reg_classes_intersect_p (to, GENERAL_REGS))
16215	from = to;
16216
16217      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16218	return (rs6000_memory_move_cost (mode, from, 0)
16219		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16220
16221/* It's more expensive to move CR_REGS than CR0_REGS because of the shift....  */
16222      else if (from == CR_REGS)
16223	return 4;
16224
16225      else
16226/* A move will cost one instruction per GPR moved.  */
16227	return 2 * HARD_REGNO_NREGS (0, mode);
16228    }
16229
16230/* Moving between two similar registers is just one instruction.  */
16231  else if (reg_classes_intersect_p (to, from))
16232    return mode == TFmode ? 4 : 2;
16233
16234/* Everything else has to go through GENERAL_REGS.  */
16235  else
16236    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16237	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16238}
16239
16240/* A C expressions returning the cost of moving data of MODE from a register to
16241   or from memory.  */
16242
16243int
16244rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16245			 int in ATTRIBUTE_UNUSED)
16246{
16247  if (reg_classes_intersect_p (class, GENERAL_REGS))
16248    return 4 * HARD_REGNO_NREGS (0, mode);
16249  else if (reg_classes_intersect_p (class, FLOAT_REGS))
16250    return 4 * HARD_REGNO_NREGS (32, mode);
16251  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16252    return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16253  else
16254    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16255}
16256
16257/* Return an RTX representing where to find the function value of a
16258   function returning MODE.  */
16259static rtx
16260rs6000_complex_function_value (enum machine_mode mode)
16261{
16262  unsigned int regno;
16263  rtx r1, r2;
16264  enum machine_mode inner = GET_MODE_INNER (mode);
16265  unsigned int inner_bytes = GET_MODE_SIZE (inner);
16266
16267  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16268    regno = FP_ARG_RETURN;
16269  else
16270    {
16271      regno = GP_ARG_RETURN;
16272
16273      /* 32-bit is OK since it'll go in r3/r4.  */
16274      if (TARGET_32BIT && inner_bytes >= 4)
16275	return gen_rtx_REG (mode, regno);
16276    }
16277
16278  if (inner_bytes >= 8)
16279    return gen_rtx_REG (mode, regno);
16280
16281  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16282			  const0_rtx);
16283  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16284			  GEN_INT (inner_bytes));
16285  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16286}
16287
16288/* Define how to find the value returned by a function.
16289   VALTYPE is the data type of the value (as a tree).
16290   If the precise function being called is known, FUNC is its FUNCTION_DECL;
16291   otherwise, FUNC is 0.
16292
16293   On the SPE, both FPs and vectors are returned in r3.
16294
16295   On RS/6000 an integer value is in r3 and a floating-point value is in
16296   fp1, unless -msoft-float.  */
16297
16298rtx
16299rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16300{
16301  enum machine_mode mode;
16302  unsigned int regno;
16303
16304  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16305    {
16306      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
16307      return gen_rtx_PARALLEL (DImode,
16308	gen_rtvec (2,
16309		   gen_rtx_EXPR_LIST (VOIDmode,
16310				      gen_rtx_REG (SImode, GP_ARG_RETURN),
16311				      const0_rtx),
16312		   gen_rtx_EXPR_LIST (VOIDmode,
16313				      gen_rtx_REG (SImode,
16314						   GP_ARG_RETURN + 1),
16315				      GEN_INT (4))));
16316    }
16317
16318  if ((INTEGRAL_TYPE_P (valtype)
16319       && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16320      || POINTER_TYPE_P (valtype))
16321    mode = TARGET_32BIT ? SImode : DImode;
16322  else
16323    mode = TYPE_MODE (valtype);
16324
16325  if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
16326    regno = FP_ARG_RETURN;
16327  else if (TREE_CODE (valtype) == COMPLEX_TYPE
16328	   && targetm.calls.split_complex_arg)
16329    return rs6000_complex_function_value (mode);
16330  else if (TREE_CODE (valtype) == VECTOR_TYPE
16331	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16332    regno = ALTIVEC_ARG_RETURN;
16333  else
16334    regno = GP_ARG_RETURN;
16335
16336  return gen_rtx_REG (mode, regno);
16337}
16338
16339/* Define how to find the value returned by a library function
16340   assuming the value has mode MODE.  */
16341rtx
16342rs6000_libcall_value (enum machine_mode mode)
16343{
16344  unsigned int regno;
16345
16346  if (GET_MODE_CLASS (mode) == MODE_FLOAT
16347	   && TARGET_HARD_FLOAT && TARGET_FPRS)
16348    regno = FP_ARG_RETURN;
16349  else if (ALTIVEC_VECTOR_MODE (mode)
16350	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16351    regno = ALTIVEC_ARG_RETURN;
16352  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16353    return rs6000_complex_function_value (mode);
16354  else
16355    regno = GP_ARG_RETURN;
16356
16357  return gen_rtx_REG (mode, regno);
16358}
16359
16360/* Define the offset between two registers, FROM to be eliminated and its
16361   replacement TO, at the start of a routine.  */
16362HOST_WIDE_INT
16363rs6000_initial_elimination_offset (int from, int to)
16364{
16365  rs6000_stack_t *info = rs6000_stack_info ();
16366  HOST_WIDE_INT offset;
16367
16368  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16369    offset = info->push_p ? 0 : -info->total_size;
16370  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16371    offset = info->total_size;
16372  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16373    offset = info->push_p ? info->total_size : 0;
16374  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16375    offset = 0;
16376  else
16377    abort ();
16378
16379  return offset;
16380}
16381
16382/* Return true if TYPE is of type __ev64_opaque__.  */
16383
16384static bool
16385is_ev64_opaque_type (tree type)
16386{
16387  return (TARGET_SPE
16388	  && (type == opaque_V2SI_type_node
16389	      || type == opaque_V2SF_type_node
16390	      || type == opaque_p_V2SI_type_node));
16391}
16392
16393static rtx
16394rs6000_dwarf_register_span (rtx reg)
16395{
16396  unsigned regno;
16397
16398  if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16399    return NULL_RTX;
16400
16401  regno = REGNO (reg);
16402
16403  /* The duality of the SPE register size wreaks all kinds of havoc.
16404     This is a way of distinguishing r0 in 32-bits from r0 in
16405     64-bits.  */
16406  return
16407    gen_rtx_PARALLEL (VOIDmode,
16408		      BYTES_BIG_ENDIAN
16409		      ? gen_rtvec (2,
16410				   gen_rtx_REG (SImode, regno + 1200),
16411				   gen_rtx_REG (SImode, regno))
16412		      : gen_rtvec (2,
16413				   gen_rtx_REG (SImode, regno),
16414				   gen_rtx_REG (SImode, regno + 1200)));
16415}
16416
16417/* Map internal gcc register numbers to DWARF2 register numbers.  */
16418
16419unsigned int
16420rs6000_dbx_register_number (unsigned int regno)
16421{
16422  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16423    return regno;
16424  if (regno == MQ_REGNO)
16425    return 100;
16426  if (regno == LINK_REGISTER_REGNUM)
16427    return 108;
16428  if (regno == COUNT_REGISTER_REGNUM)
16429    return 109;
16430  if (CR_REGNO_P (regno))
16431    return regno - CR0_REGNO + 86;
16432  if (regno == XER_REGNO)
16433    return 101;
16434  if (ALTIVEC_REGNO_P (regno))
16435    return regno - FIRST_ALTIVEC_REGNO + 1124;
16436  if (regno == VRSAVE_REGNO)
16437    return 356;
16438  if (regno == VSCR_REGNO)
16439    return 67;
16440  if (regno == SPE_ACC_REGNO)
16441    return 99;
16442  if (regno == SPEFSCR_REGNO)
16443    return 612;
16444  /* SPE high reg number.  We get these values of regno from
16445     rs6000_dwarf_register_span.  */
16446  if (regno >= 1200 && regno < 1232)
16447    return regno;
16448
16449  abort ();
16450}
16451
16452#include "gt-rs6000.h"
16453