rs6000.c revision 161651
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6   This file is part of GCC.
7
8   GCC is free software; you can redistribute it and/or modify it
9   under the terms of the GNU General Public License as published
10   by the Free Software Foundation; either version 2, or (at your
11   option) any later version.
12
13   GCC is distributed in the hope that it will be useful, but WITHOUT
14   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16   License for more details.
17
18   You should have received a copy of the GNU General Public License
19   along with GCC; see the file COPYING.  If not, write to the
20   Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21   MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
36#include "obstack.h"
37#include "tree.h"
38#include "expr.h"
39#include "optabs.h"
40#include "except.h"
41#include "function.h"
42#include "output.h"
43#include "basic-block.h"
44#include "integrate.h"
45#include "toplev.h"
46#include "ggc.h"
47#include "hashtab.h"
48#include "tm_p.h"
49#include "target.h"
50#include "target-def.h"
51#include "langhooks.h"
52#include "reload.h"
53#include "cfglayout.h"
54#include "sched-int.h"
55#if TARGET_XCOFF
56#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
57#endif
58
59#ifndef TARGET_NO_PROTOTYPE
60#define TARGET_NO_PROTOTYPE 0
61#endif
62
/* True iff N fits in the signed 5-bit range -16 .. 15, i.e. can be a
   vector-splat immediate.  */
#define EASY_VECTOR_15(n) ((n) <= 15 && (n) >= -16)

/* True iff N is an even value in [0x10, 0x1e]; per the macro name, such
   a constant can presumably be formed by splatting N/2 (which is in the
   EASY_VECTOR_15 range) and adding the register to itself.  */
#define EASY_VECTOR_15_ADD_SELF(n) (!((n) & 1) \
                                          && (n) >= 0x10 && (n) <= 0x1e)

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
69
/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int toc_save_p;		/* true if the TOC needs to be saved */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int toc_save_offset;		/* offset to save the TOC pointer */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  int varargs_size;		/* size to hold V.4 args passed in regs */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int lr_size;			/* size to hold LR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;		/* size of SPE alignment padding, by
				   analogy with altivec_padding_size
				   above -- TODO confirm */
  int toc_size;			/* size to hold TOC if not in save_size */
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;	/* presumably nonzero when SPE 64-bit
				   GPRs are in use -- confirm against
				   rs6000_stack_info */
} rs6000_stack_t;
112
/* Target cpu type */

enum processor_type rs6000_cpu;

/* CPU selection table: entry 0 holds the configure-time default
   (--with-cpu=, filled in by rs6000_override_options), entry 1 the
   -mcpu= value, entry 2 the -mtune= value.  The last two columns say
   whether a match sets the tune and/or arch settings; note -mtune=
   sets tuning only (arch column 0).  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
123
124/* Always emit branch hint bits.  */
125static GTY(()) bool rs6000_always_hint;
126
127/* Schedule instructions for group formation.  */
128static GTY(()) bool rs6000_sched_groups;
129
130/* Support adjust_priority scheduler hook
131   and -mprioritize-restricted-insns= option.  */
132const char *rs6000_sched_restricted_insns_priority_str;
133int rs6000_sched_restricted_insns_priority;
134
135/* Support for -msched-costly-dep option.  */
136const char *rs6000_sched_costly_dep_str;
137enum rs6000_dependence_cost rs6000_sched_costly_dep;
138
139/* Support for -minsert-sched-nops option.  */
140const char *rs6000_sched_insert_nops_str;
141enum rs6000_nop_insertion rs6000_sched_insert_nops;
142
143/* Size of long double */
144const char *rs6000_long_double_size_string;
145int rs6000_long_double_type_size;
146
147/* Whether -mabi=altivec has appeared */
148int rs6000_altivec_abi;
149
150/* Whether VRSAVE instructions should be generated.  */
151int rs6000_altivec_vrsave;
152
153/* String from -mvrsave= option.  */
154const char *rs6000_altivec_vrsave_string;
155
156/* Nonzero if we want SPE ABI extensions.  */
157int rs6000_spe_abi;
158
159/* Whether isel instructions should be generated.  */
160int rs6000_isel;
161
162/* Whether SPE simd instructions should be generated.  */
163int rs6000_spe;
164
165/* Nonzero if floating point operations are done in the GPRs.  */
166int rs6000_float_gprs = 0;
167
168/* String from -mfloat-gprs=.  */
169const char *rs6000_float_gprs_string;
170
171/* String from -misel=.  */
172const char *rs6000_isel_string;
173
174/* String from -mspe=.  */
175const char *rs6000_spe_string;
176
177/* Set to nonzero once AIX common-mode calls have been defined.  */
178static GTY(()) int common_mode_defined;
179
180/* Save information from a "cmpxx" operation until the branch or scc is
181   emitted.  */
182rtx rs6000_compare_op0, rs6000_compare_op1;
183int rs6000_compare_fp_p;
184
185/* Label number of label created for -mrelocatable, to call to so we can
186   get the address of the GOT section */
187int rs6000_pic_labelno;
188
189#ifdef USING_ELFOS_H
190/* Which abi to adhere to */
191const char *rs6000_abi_name;
192
193/* Semantics of the small data area */
194enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195
196/* Which small data model to use */
197const char *rs6000_sdata_name = (char *)0;
198
199/* Counter for labels which are to be placed in .fixup.  */
200int fixuplabelno = 0;
201#endif
202
203/* Bit size of immediate TLS offsets and string from which it is decoded.  */
204int rs6000_tls_size = 32;
205const char *rs6000_tls_size_string;
206
207/* ABI enumeration available for subtarget to use.  */
208enum rs6000_abi rs6000_current_abi;
209
210/* ABI string from -mabi= option.  */
211const char *rs6000_abi_string;
212
213/* Debug flags */
214const char *rs6000_debug_name;
215int rs6000_debug_stack;		/* debug stack applications */
216int rs6000_debug_arg;		/* debug argument handling */
217
218/* Opaque types.  */
219static GTY(()) tree opaque_V2SI_type_node;
220static GTY(()) tree opaque_V2SF_type_node;
221static GTY(()) tree opaque_p_V2SI_type_node;
222
223/* AltiVec requires a few more basic types in addition to the vector
224   types already defined in tree.c.  */
225static GTY(()) tree bool_char_type_node;	/* __bool char */
226static GTY(()) tree bool_short_type_node;	/* __bool short */
227static GTY(()) tree bool_int_type_node;		/* __bool int */
228static GTY(()) tree pixel_type_node;		/* __pixel */
229static GTY(()) tree bool_V16QI_type_node;	/* __vector __bool char */
230static GTY(()) tree bool_V8HI_type_node;	/* __vector __bool short */
231static GTY(()) tree bool_V4SI_type_node;	/* __vector __bool int */
232static GTY(()) tree pixel_V8HI_type_node;	/* __vector __pixel */
233
234int rs6000_warn_altivec_long = 1;		/* On by default. */
235const char *rs6000_warn_altivec_long_switch;
236
/* Traceback-table setting: raw option string and decoded level.
   NOTE(review): presumably from a -mtraceback= style option --
   confirm the exact option name where the string is parsed.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,	/* no explicit request */
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;
244
/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];	/* presumably holds the TOC label text --
				   confirm where it is written */

/* Alias set for saves and restores from the rs6000 stack.  */
static GTY(()) int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  Exception: init_cumulative_args
   looks at it too, for libcalls.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;	/* raw option text for the above */

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
const char *rs6000_alignment_string;
int rs6000_alignment_flags;		/* decoded form of the string above */
264
/* Describes one rs6000 builtin: the target-flag mask that enables it,
   the insn code used to expand it, its user-visible name, and its
   rs6000_builtins enumerator.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;	/* insn pattern implementing the builtin */
  const char *const name;	/* user-visible builtin name */
  const enum rs6000_builtins code;
};
275
276static bool rs6000_function_ok_for_sibcall (tree, tree);
277static int num_insns_constant_wide (HOST_WIDE_INT);
278static void validate_condition_mode (enum rtx_code, enum machine_mode);
279static rtx rs6000_generate_compare (enum rtx_code);
280static void rs6000_maybe_dead (rtx);
281static void rs6000_emit_stack_tie (void);
282static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
283static rtx spe_synthesize_frame_save (rtx);
284static bool spe_func_has_64bit_regs_p (void);
285static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
286			     int, HOST_WIDE_INT);
287static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
288static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
289static unsigned rs6000_hash_constant (rtx);
290static unsigned toc_hash_function (const void *);
291static int toc_hash_eq (const void *, const void *);
292static int constant_pool_expr_1 (rtx, int *, int *);
293static bool constant_pool_expr_p (rtx);
294static bool toc_relative_expr_p (rtx);
295static bool legitimate_small_data_p (enum machine_mode, rtx);
296static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
297static bool legitimate_indexed_address_p (rtx, int);
298static bool legitimate_indirect_address_p (rtx, int);
299static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
300static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
301static struct machine_function * rs6000_init_machine_status (void);
302static bool rs6000_assemble_integer (rtx, unsigned int, int);
303#ifdef HAVE_GAS_HIDDEN
304static void rs6000_assemble_visibility (tree, int);
305#endif
306static int rs6000_ra_ever_killed (void);
307static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
308static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
309static const char *rs6000_mangle_fundamental_type (tree);
310extern const struct attribute_spec rs6000_attribute_table[];
311static void rs6000_set_default_type_attributes (tree);
312static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
313static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
314static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
315				    tree);
316static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
317static bool rs6000_return_in_memory (tree, tree);
318static void rs6000_file_start (void);
319#if TARGET_ELF
320static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
321static void rs6000_elf_asm_out_constructor (rtx, int);
322static void rs6000_elf_asm_out_destructor (rtx, int);
323static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
324static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
325static void rs6000_elf_unique_section (tree, int);
326static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
327					   unsigned HOST_WIDE_INT);
328static void rs6000_elf_encode_section_info (tree, rtx, int)
329     ATTRIBUTE_UNUSED;
330static bool rs6000_elf_in_small_data_p (tree);
331#endif
332#if TARGET_XCOFF
333static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
334static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
335static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
336static void rs6000_xcoff_unique_section (tree, int);
337static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
338					     unsigned HOST_WIDE_INT);
339static const char * rs6000_xcoff_strip_name_encoding (const char *);
340static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
341static void rs6000_xcoff_file_start (void);
342static void rs6000_xcoff_file_end (void);
343#endif
344#if TARGET_MACHO
345static bool rs6000_binds_local_p (tree);
346#endif
347static int rs6000_use_dfa_pipeline_interface (void);
348static int rs6000_variable_issue (FILE *, int, rtx, int);
349static bool rs6000_rtx_costs (rtx, int, int, int *);
350static int rs6000_adjust_cost (rtx, rtx, rtx, int);
351static bool is_microcoded_insn (rtx);
352static int is_dispatch_slot_restricted (rtx);
353static bool is_cracked_insn (rtx);
354static bool is_branch_slot_insn (rtx);
355static int rs6000_adjust_priority (rtx, int);
356static int rs6000_issue_rate (void);
357static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
358static rtx get_next_active_insn (rtx, rtx);
359static bool insn_terminates_group_p (rtx , enum group_termination);
360static bool is_costly_group (rtx *, rtx);
361static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
362static int redefine_groups (FILE *, int, rtx, rtx);
363static int pad_groups (FILE *, int, rtx, rtx);
364static void rs6000_sched_finish (FILE *, int);
365static int rs6000_use_sched_lookahead (void);
366
367static void rs6000_init_builtins (void);
368static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
369static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
370static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
371static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
372static void altivec_init_builtins (void);
373static void rs6000_common_init_builtins (void);
374static void rs6000_init_libfuncs (void);
375
376static void enable_mask_for_builtins (struct builtin_description *, int,
377				      enum rs6000_builtins,
378				      enum rs6000_builtins);
379static void spe_init_builtins (void);
380static rtx spe_expand_builtin (tree, rtx, bool *);
381static rtx spe_expand_stv_builtin (enum insn_code, tree);
382static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
383static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
384static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
385static rs6000_stack_t *rs6000_stack_info (void);
386static void debug_stack_info (rs6000_stack_t *);
387
388static rtx altivec_expand_builtin (tree, rtx, bool *);
389static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
390static rtx altivec_expand_st_builtin (tree, rtx, bool *);
391static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
392static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
393static rtx altivec_expand_predicate_builtin (enum insn_code,
394					    const char *, tree, rtx);
395static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
396static rtx altivec_expand_stv_builtin (enum insn_code, tree);
397static void rs6000_parse_abi_options (void);
398static void rs6000_parse_alignment_option (void);
399static void rs6000_parse_tls_size_option (void);
400static void rs6000_parse_yes_no_option (const char *, const char *, int *);
401static int first_altivec_reg_to_save (void);
402static unsigned int compute_vrsave_mask (void);
403static void is_altivec_return_reg (rtx, void *);
404static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
405int easy_vector_constant (rtx, enum machine_mode);
406static int easy_vector_same (rtx, enum machine_mode);
407static int easy_vector_splat_const (int, enum machine_mode);
408static bool is_ev64_opaque_type (tree);
409static rtx rs6000_dwarf_register_span (rtx);
410static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
411static rtx rs6000_tls_get_addr (void);
412static rtx rs6000_got_sym (void);
413static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
414static const char *rs6000_get_some_local_dynamic_name (void);
415static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
416static rtx rs6000_complex_function_value (enum machine_mode);
417static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
418				    enum machine_mode, tree);
419static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
420static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
421static void setup_incoming_varargs (CUMULATIVE_ARGS *,
422				    enum machine_mode, tree,
423				    int *, int);
424#if TARGET_MACHO
425static void macho_branch_islands (void);
426static void add_compiler_branch_island (tree, tree, int);
427static int no_previous_def (tree function_name);
428static tree get_prev_label (tree function_name);
429#endif
430
431static tree rs6000_build_builtin_va_list (void);
432
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;	/* machine mode associated with KEY */
  int labelno;			/* label number for this TOC entry */
};

/* GC-visible hash table of toc_hash_struct entries.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
445
/* Default register names.  Groups mirror alt_reg_names below
   (%r = GPR, %f = FPR, %cr = condition register).  */
char rs6000_reg_names[][8] =
{
      /* GPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* FPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      /* Condition registers CR0-CR7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
469
#ifdef TARGET_REGNAMES
/* Alternate, symbolic register names (%rN, %fN, %crN, %vN); only
   compiled in when the target defines TARGET_REGNAMES.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
494
495#ifndef MASK_STRICT_ALIGN
496#define MASK_STRICT_ALIGN 0
497#endif
498#ifndef TARGET_PROFILE_KERNEL
499#define TARGET_PROFILE_KERNEL 0
500#endif
501
502/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
503#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
504
505/* Return 1 for a symbol ref for a thread-local storage symbol.  */
506#define RS6000_SYMBOL_REF_TLS_P(RTX) \
507  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
508
509/* Initialize the GCC target structure.  */
510#undef TARGET_ATTRIBUTE_TABLE
511#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
512#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
513#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
514
515#undef TARGET_ASM_ALIGNED_DI_OP
516#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
517
518/* Default unaligned ops are only provided for ELF.  Find the ops needed
519   for non-ELF systems.  */
520#ifndef OBJECT_FORMAT_ELF
521#if TARGET_XCOFF
522/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
523   64-bit targets.  */
524#undef TARGET_ASM_UNALIGNED_HI_OP
525#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
526#undef TARGET_ASM_UNALIGNED_SI_OP
527#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
528#undef TARGET_ASM_UNALIGNED_DI_OP
529#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
530#else
531/* For Darwin.  */
532#undef TARGET_ASM_UNALIGNED_HI_OP
533#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
534#undef TARGET_ASM_UNALIGNED_SI_OP
535#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
536#endif
537#endif
538
539/* This hook deals with fixups for relocatable code and DI-mode objects
540   in 64-bit code.  */
541#undef TARGET_ASM_INTEGER
542#define TARGET_ASM_INTEGER rs6000_assemble_integer
543
544#ifdef HAVE_GAS_HIDDEN
545#undef TARGET_ASM_ASSEMBLE_VISIBILITY
546#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
547#endif
548
549#undef TARGET_HAVE_TLS
550#define TARGET_HAVE_TLS HAVE_AS_TLS
551
552#undef TARGET_CANNOT_FORCE_CONST_MEM
553#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
554
555#undef TARGET_ASM_FUNCTION_PROLOGUE
556#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
557#undef TARGET_ASM_FUNCTION_EPILOGUE
558#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
559
560#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
561#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
562#undef  TARGET_SCHED_VARIABLE_ISSUE
563#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
564
565#undef TARGET_SCHED_ISSUE_RATE
566#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
567#undef TARGET_SCHED_ADJUST_COST
568#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
569#undef TARGET_SCHED_ADJUST_PRIORITY
570#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
571#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
572#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
573#undef TARGET_SCHED_FINISH
574#define TARGET_SCHED_FINISH rs6000_sched_finish
575
576#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
577#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
578
579#undef TARGET_INIT_BUILTINS
580#define TARGET_INIT_BUILTINS rs6000_init_builtins
581
582#undef TARGET_EXPAND_BUILTIN
583#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
584
585#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
586#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
587
588#undef TARGET_INIT_LIBFUNCS
589#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
590
591#if TARGET_MACHO
592#undef TARGET_BINDS_LOCAL_P
593#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
594#endif
595
596#undef TARGET_ASM_OUTPUT_MI_THUNK
597#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
598
599#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
600#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
601
602#undef TARGET_FUNCTION_OK_FOR_SIBCALL
603#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
604
605#undef TARGET_RTX_COSTS
606#define TARGET_RTX_COSTS rs6000_rtx_costs
607#undef TARGET_ADDRESS_COST
608#define TARGET_ADDRESS_COST hook_int_rtx_0
609
610#undef TARGET_VECTOR_OPAQUE_P
611#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
612
613#undef TARGET_DWARF_REGISTER_SPAN
614#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
615
616/* On rs6000, function arguments are promoted, as are function return
617   values.  */
618#undef TARGET_PROMOTE_FUNCTION_ARGS
619#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
620#undef TARGET_PROMOTE_FUNCTION_RETURN
621#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
622
623/* Structure return values are passed as an extra parameter.  */
624#undef TARGET_STRUCT_VALUE_RTX
625#define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
626
627#undef TARGET_RETURN_IN_MEMORY
628#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
629
630#undef TARGET_SETUP_INCOMING_VARARGS
631#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
632
633/* Always strict argument naming on rs6000.  */
634#undef TARGET_STRICT_ARGUMENT_NAMING
635#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
636#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
637#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
638#undef TARGET_SPLIT_COMPLEX_ARG
639#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
640
641#undef TARGET_BUILD_BUILTIN_VA_LIST
642#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
643
/* The target hook vector, assembled from the TARGET_* macro
   definitions above via TARGET_INITIALIZER.  */
struct gcc_target targetm = TARGET_INITIALIZER;
645
646/* Override command line options.  Mostly we process the processor
647   type and sometimes adjust other TARGET_ options.  */
648
649void
650rs6000_override_options (const char *default_cpu)
651{
652  size_t i, j;
653  struct rs6000_cpu_select *ptr;
654  int set_masks;
655
656  /* Simplifications for entries below.  */
657
658  enum {
659    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
660    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
661  };
662
663  /* This table occasionally claims that a processor does not support
664     a particular feature even though it does, but the feature is slower
665     than the alternative.  Thus, it shouldn't be relied on as a
666     complete description of the processor's support.
667
668     Please keep this list in order, and don't forget to update the
669     documentation in invoke.texi when adding a new processor or
670     flag.  */
671  static struct ptt
672    {
673      const char *const name;		/* Canonical processor name.  */
674      const enum processor_type processor; /* Processor type enum value.  */
675      const int target_enable;	/* Target flags to enable.  */
676    } const processor_target_table[]
677      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678	 {"403", PROCESSOR_PPC403,
679	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
680	 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
681	 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
682	 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
683	 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
684	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
685	 {"601", PROCESSOR_PPC601,
686	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
687	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
688	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
689	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
691	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
692	 {"620", PROCESSOR_PPC620,
693	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
694	 {"630", PROCESSOR_PPC630,
695	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
696	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
697	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
698	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
699	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
700	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
701	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
702	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
703	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
704	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
705	 {"970", PROCESSOR_POWER4,
706	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
707	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
708	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
709	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
710	 {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
711	 {"G5", PROCESSOR_POWER4,
712	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
713	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
714	 {"power2", PROCESSOR_POWER,
715	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
716	 {"power3", PROCESSOR_PPC630,
717	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
718	 {"power4", PROCESSOR_POWER4,
719	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
720	 {"power5", PROCESSOR_POWER5,
721	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
722	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
723	 {"powerpc64", PROCESSOR_POWERPC64,
724	  POWERPC_BASE_MASK | MASK_POWERPC64},
725	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
726	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
727	 {"rios2", PROCESSOR_RIOS2,
728	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
729	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
730	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
731	 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
732      };
733
734  const size_t ptt_size = ARRAY_SIZE (processor_target_table);
735
736  /* Some OSs don't support saving the high part of 64-bit registers on
737     context switch.  Other OSs don't support saving Altivec registers.
738     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
739     settings; if the user wants either, the user must explicitly specify
740     them and we won't interfere with the user's specification.  */
741
742  enum {
743    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
744    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
745		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
746		     | MASK_MFCRF)
747  };
748 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
749#ifdef OS_MISSING_POWERPC64
750  if (OS_MISSING_POWERPC64)
751    set_masks &= ~MASK_POWERPC64;
752#endif
753#ifdef OS_MISSING_ALTIVEC
754  if (OS_MISSING_ALTIVEC)
755    set_masks &= ~MASK_ALTIVEC;
756#endif
757
758  /* Don't override by the processor default if given explicitly.  */
759  set_masks &= ~target_flags_explicit;
760
761  /* Identify the processor type.  */
762  rs6000_select[0].string = default_cpu;
763  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
764
765  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
766    {
767      ptr = &rs6000_select[i];
768      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
769	{
770	  for (j = 0; j < ptt_size; j++)
771	    if (! strcmp (ptr->string, processor_target_table[j].name))
772	      {
773		if (ptr->set_tune_p)
774		  rs6000_cpu = processor_target_table[j].processor;
775
776		if (ptr->set_arch_p)
777		  {
778		    target_flags &= ~set_masks;
779		    target_flags |= (processor_target_table[j].target_enable
780				     & set_masks);
781		  }
782		break;
783	      }
784
785	  if (j == ptt_size)
786	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
787	}
788    }
789
790  if (TARGET_E500)
791    rs6000_isel = 1;
792
793  /* If we are optimizing big endian systems for space, use the load/store
794     multiple and string instructions.  */
795  if (BYTES_BIG_ENDIAN && optimize_size)
796    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
797
798  /* Don't allow -mmultiple or -mstring on little endian systems
799     unless the cpu is a 750, because the hardware doesn't support the
800     instructions used in little endian mode, and causes an alignment
801     trap.  The 750 does not cause an alignment trap (except when the
802     target is unaligned).  */
803
804  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
805    {
806      if (TARGET_MULTIPLE)
807	{
808	  target_flags &= ~MASK_MULTIPLE;
809	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
810	    warning ("-mmultiple is not supported on little endian systems");
811	}
812
813      if (TARGET_STRING)
814	{
815	  target_flags &= ~MASK_STRING;
816	  if ((target_flags_explicit & MASK_STRING) != 0)
817	    warning ("-mstring is not supported on little endian systems");
818	}
819    }
820
821  /* Set debug flags */
822  if (rs6000_debug_name)
823    {
824      if (! strcmp (rs6000_debug_name, "all"))
825	rs6000_debug_stack = rs6000_debug_arg = 1;
826      else if (! strcmp (rs6000_debug_name, "stack"))
827	rs6000_debug_stack = 1;
828      else if (! strcmp (rs6000_debug_name, "arg"))
829	rs6000_debug_arg = 1;
830      else
831	error ("unknown -mdebug-%s switch", rs6000_debug_name);
832    }
833
834  if (rs6000_traceback_name)
835    {
836      if (! strncmp (rs6000_traceback_name, "full", 4))
837	rs6000_traceback = traceback_full;
838      else if (! strncmp (rs6000_traceback_name, "part", 4))
839	rs6000_traceback = traceback_part;
840      else if (! strncmp (rs6000_traceback_name, "no", 2))
841	rs6000_traceback = traceback_none;
842      else
843	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
844	       rs6000_traceback_name);
845    }
846
847  /* Set size of long double */
848  rs6000_long_double_type_size = 64;
849  if (rs6000_long_double_size_string)
850    {
851      char *tail;
852      int size = strtol (rs6000_long_double_size_string, &tail, 10);
853      if (*tail != '\0' || (size != 64 && size != 128))
854	error ("Unknown switch -mlong-double-%s",
855	       rs6000_long_double_size_string);
856      else
857	rs6000_long_double_type_size = size;
858    }
859
860  /* Set Altivec ABI as default for powerpc64 linux.  */
861  if (TARGET_ELF && TARGET_64BIT)
862    {
863      rs6000_altivec_abi = 1;
864      rs6000_altivec_vrsave = 1;
865    }
866
867  /* Handle -mabi= options.  */
868  rs6000_parse_abi_options ();
869
870  /* Handle -malign-XXXXX option.  */
871  rs6000_parse_alignment_option ();
872
873  /* Handle generic -mFOO=YES/NO options.  */
874  rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
875			      &rs6000_altivec_vrsave);
876  rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
877			      &rs6000_isel);
878  rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
879  rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
880			      &rs6000_float_gprs);
881
882  /* Handle -mtls-size option.  */
883  rs6000_parse_tls_size_option ();
884
885#ifdef SUBTARGET_OVERRIDE_OPTIONS
886  SUBTARGET_OVERRIDE_OPTIONS;
887#endif
888#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
889  SUBSUBTARGET_OVERRIDE_OPTIONS;
890#endif
891
892  if (TARGET_E500)
893    {
894      if (TARGET_ALTIVEC)
895      error ("AltiVec and E500 instructions cannot coexist");
896
897      /* The e500 does not have string instructions, and we set
898	 MASK_STRING above when optimizing for size.  */
899      if ((target_flags & MASK_STRING) != 0)
900	target_flags = target_flags & ~MASK_STRING;
901
902      /* No SPE means 64-bit long doubles, even if an E500.  */
903      if (rs6000_spe_string != 0
904          && !strcmp (rs6000_spe_string, "no"))
905	rs6000_long_double_type_size = 64;
906    }
907  else if (rs6000_select[1].string != NULL)
908    {
909      /* For the powerpc-eabispe configuration, we set all these by
910	 default, so let's unset them if we manually set another
911	 CPU that is not the E500.  */
912      if (rs6000_abi_string == 0)
913	rs6000_spe_abi = 0;
914      if (rs6000_spe_string == 0)
915	rs6000_spe = 0;
916      if (rs6000_float_gprs_string == 0)
917	rs6000_float_gprs = 0;
918      if (rs6000_isel_string == 0)
919	rs6000_isel = 0;
920      if (rs6000_long_double_size_string == 0)
921	rs6000_long_double_type_size = 64;
922    }
923
924  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
925			&& rs6000_cpu != PROCESSOR_POWER5);
926  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
927			 || rs6000_cpu == PROCESSOR_POWER5);
928
929  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
930     using TARGET_OPTIONS to handle a toggle switch, but we're out of
931     bits in target_flags so TARGET_SWITCHES cannot be used.
932     Assumption here is that rs6000_longcall_switch points into the
933     text of the complete option, rather than being a copy, so we can
934     scan back for the presence or absence of the no- modifier.  */
935  if (rs6000_longcall_switch)
936    {
937      const char *base = rs6000_longcall_switch;
938      while (base[-1] != 'm') base--;
939
940      if (*rs6000_longcall_switch != '\0')
941	error ("invalid option `%s'", base);
942      rs6000_default_long_calls = (base[0] != 'n');
943    }
944
945  /* Handle -m(no-)warn-altivec-long similarly.  */
946  if (rs6000_warn_altivec_long_switch)
947    {
948      const char *base = rs6000_warn_altivec_long_switch;
949      while (base[-1] != 'm') base--;
950
951      if (*rs6000_warn_altivec_long_switch != '\0')
952       error ("invalid option `%s'", base);
953      rs6000_warn_altivec_long = (base[0] != 'n');
954    }
955
956  /* Handle -mprioritize-restricted-insns option.  */
957  rs6000_sched_restricted_insns_priority
958    = (rs6000_sched_groups ? 1 : 0);
959  if (rs6000_sched_restricted_insns_priority_str)
960    rs6000_sched_restricted_insns_priority =
961      atoi (rs6000_sched_restricted_insns_priority_str);
962
963  /* Handle -msched-costly-dep option.  */
964  rs6000_sched_costly_dep
965    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
966  if (rs6000_sched_costly_dep_str)
967    {
968      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
969        rs6000_sched_costly_dep = no_dep_costly;
970      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
971        rs6000_sched_costly_dep = all_deps_costly;
972      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
973        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
974      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
975        rs6000_sched_costly_dep = store_to_load_dep_costly;
976      else
977        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
978    }
979
980  /* Handle -minsert-sched-nops option.  */
981  rs6000_sched_insert_nops
982    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
983  if (rs6000_sched_insert_nops_str)
984    {
985      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
986        rs6000_sched_insert_nops = sched_finish_none;
987      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
988        rs6000_sched_insert_nops = sched_finish_pad_groups;
989      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
990        rs6000_sched_insert_nops = sched_finish_regroup_exact;
991      else
992        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
993    }
994
995#ifdef TARGET_REGNAMES
996  /* If the user desires alternate register names, copy in the
997     alternate names now.  */
998  if (TARGET_REGNAMES)
999    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1000#endif
1001
1002  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1003     If -maix-struct-return or -msvr4-struct-return was explicitly
1004     used, don't override with the ABI default.  */
1005  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1006    {
1007      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1008	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1009      else
1010	target_flags |= MASK_AIX_STRUCT_RET;
1011    }
1012
1013  if (TARGET_LONG_DOUBLE_128
1014      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1015    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1016
1017  /* Allocate an alias set for register saves & restores from stack.  */
1018  rs6000_sr_alias_set = new_alias_set ();
1019
1020  if (TARGET_TOC)
1021    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1022
1023  /* We can only guarantee the availability of DI pseudo-ops when
1024     assembling for 64-bit targets.  */
1025  if (!TARGET_64BIT)
1026    {
1027      targetm.asm_out.aligned_op.di = NULL;
1028      targetm.asm_out.unaligned_op.di = NULL;
1029    }
1030
1031  /* Set maximum branch target alignment at two instructions, eight bytes.  */
1032  align_jumps_max_skip = 8;
1033  align_loops_max_skip = 8;
1034
1035  /* Arrange to save and restore machine status around nested functions.  */
1036  init_machine_status = rs6000_init_machine_status;
1037
1038  /* We should always be splitting complex arguments, but we can't break
1039     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
1040  if (DEFAULT_ABI != ABI_AIX)
1041    targetm.calls.split_complex_arg = NULL;
1042}
1043
1044/* Handle generic options of the form -mfoo=yes/no.
1045   NAME is the option name.
1046   VALUE is the option value.
1047   FLAG is the pointer to the flag where to store a 1 or 0, depending on
1048   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* A null VALUE means the -mNAME= switch was not given; leave *FLAG
     at its current default.  */
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1061
1062/* Handle -mabi= options.  */
1063static void
1064rs6000_parse_abi_options (void)
1065{
1066  if (rs6000_abi_string == 0)
1067    return;
1068  else if (! strcmp (rs6000_abi_string, "altivec"))
1069    {
1070      rs6000_altivec_abi = 1;
1071      rs6000_spe_abi = 0;
1072    }
1073  else if (! strcmp (rs6000_abi_string, "no-altivec"))
1074    rs6000_altivec_abi = 0;
1075  else if (! strcmp (rs6000_abi_string, "spe"))
1076    {
1077      rs6000_spe_abi = 1;
1078      rs6000_altivec_abi = 0;
1079      if (!TARGET_SPE_ABI)
1080	error ("not configured for ABI: '%s'", rs6000_abi_string);
1081    }
1082
1083  else if (! strcmp (rs6000_abi_string, "no-spe"))
1084    rs6000_spe_abi = 0;
1085  else
1086    error ("unknown ABI specified: '%s'", rs6000_abi_string);
1087}
1088
1089/* Handle -malign-XXXXXX options.  */
1090static void
1091rs6000_parse_alignment_option (void)
1092{
1093  if (rs6000_alignment_string == 0)
1094    return;
1095  else if (! strcmp (rs6000_alignment_string, "power"))
1096    rs6000_alignment_flags = MASK_ALIGN_POWER;
1097  else if (! strcmp (rs6000_alignment_string, "natural"))
1098    rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1099  else
1100    error ("unknown -malign-XXXXX option specified: '%s'",
1101	   rs6000_alignment_string);
1102}
1103
1104/* Validate and record the size specified with the -mtls-size option.  */
1105
1106static void
1107rs6000_parse_tls_size_option (void)
1108{
1109  if (rs6000_tls_size_string == 0)
1110    return;
1111  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1112    rs6000_tls_size = 16;
1113  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1114    rs6000_tls_size = 32;
1115  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1116    rs6000_tls_size = 64;
1117  else
1118    error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1119}
1120
/* Implement the OPTIMIZATION_OPTIONS target hook; this target makes no
   per-optimization-level (-O/-Os) adjustments here.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
1125
1126/* Do anything needed at the start of the asm file.  */
1127
static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  /* START points at the comment header until the first option has been
     printed; after that it is reset to "" so the header appears once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On bi-arch targets, suppress the default CPU note when the actual
     word size differs from the configured default.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Emit each -mcpu/-mtune style selection that was given.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      /* Record the small-data model and threshold, if any.  */
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* Only terminate the line if something was actually printed.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1181
1182/* Return nonzero if this function is known to have a null epilogue.  */
1183
1184int
1185direct_return (void)
1186{
1187  if (reload_completed)
1188    {
1189      rs6000_stack_t *info = rs6000_stack_info ();
1190
1191      if (info->first_gp_reg_save == 32
1192	  && info->first_fp_reg_save == 64
1193	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1194	  && ! info->lr_save_p
1195	  && ! info->cr_save_p
1196	  && info->vrsave_mask == 0
1197	  && ! info->push_p)
1198	return 1;
1199    }
1200
1201  return 0;
1202}
1203
1204/* Returns 1 always.  */
1205
int
any_operand (rtx op ATTRIBUTE_UNUSED,
	     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Predicate that accepts every operand unconditionally.  */
  return 1;
}
1212
1213/* Returns 1 if op is the count register.  */
1214int
1215count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1216{
1217  if (GET_CODE (op) != REG)
1218    return 0;
1219
1220  if (REGNO (op) == COUNT_REGISTER_REGNUM)
1221    return 1;
1222
1223  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1224    return 1;
1225
1226  return 0;
1227}
1228
1229/* Returns 1 if op is an altivec register.  */
1230int
1231altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1232{
1233
1234  return (register_operand (op, mode)
1235	  && (GET_CODE (op) != REG
1236	      || REGNO (op) > FIRST_PSEUDO_REGISTER
1237	      || ALTIVEC_REGNO_P (REGNO (op))));
1238}
1239
1240int
1241xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1242{
1243  if (GET_CODE (op) != REG)
1244    return 0;
1245
1246  if (XER_REGNO_P (REGNO (op)))
1247    return 1;
1248
1249  return 0;
1250}
1251
1252/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
1253   by such constants completes more quickly.  */
1254
1255int
1256s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1257{
1258  return ( GET_CODE (op) == CONST_INT
1259	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1260}
1261
1262/* Return 1 if OP is a constant that can fit in a D field.  */
1263
1264int
1265short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1266{
1267  return (GET_CODE (op) == CONST_INT
1268	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1269}
1270
1271/* Similar for an unsigned D field.  */
1272
1273int
1274u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1275{
1276  return (GET_CODE (op) == CONST_INT
1277	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1278}
1279
1280/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
1281
1282int
1283non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1284{
1285  return (GET_CODE (op) == CONST_INT
1286	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1287}
1288
1289/* Returns 1 if OP is a CONST_INT that is a positive value
1290   and an exact power of 2.  */
1291
1292int
1293exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1294{
1295  return (GET_CODE (op) == CONST_INT
1296	  && INTVAL (op) > 0
1297	  && exact_log2 (INTVAL (op)) >= 0);
1298}
1299
1300/* Returns 1 if OP is a register that is not special (i.e., not MQ,
1301   ctr, or lr).  */
1302
1303int
1304gpc_reg_operand (rtx op, enum machine_mode mode)
1305{
1306  return (register_operand (op, mode)
1307	  && (GET_CODE (op) != REG
1308	      || (REGNO (op) >= ARG_POINTER_REGNUM
1309		  && !XER_REGNO_P (REGNO (op)))
1310	      || REGNO (op) < MQ_REGNO));
1311}
1312
1313/* Returns 1 if OP is either a pseudo-register or a register denoting a
1314   CR field.  */
1315
1316int
1317cc_reg_operand (rtx op, enum machine_mode mode)
1318{
1319  return (register_operand (op, mode)
1320	  && (GET_CODE (op) != REG
1321	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1322	      || CR_REGNO_P (REGNO (op))));
1323}
1324
1325/* Returns 1 if OP is either a pseudo-register or a register denoting a
1326   CR field that isn't CR0.  */
1327
1328int
1329cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1330{
1331  return (register_operand (op, mode)
1332	  && (GET_CODE (op) != REG
1333	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
1334	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
1335}
1336
1337/* Returns 1 if OP is either a constant integer valid for a D-field or
1338   a non-special register.  If a register, it must be in the proper
1339   mode unless MODE is VOIDmode.  */
1340
1341int
1342reg_or_short_operand (rtx op, enum machine_mode mode)
1343{
1344  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1345}
1346
1347/* Similar, except check if the negation of the constant would be
1348   valid for a D-field.  Don't allow a constant zero, since all the
1349   patterns that call this predicate use "addic r1,r2,-constant" on
1350   a constant value to set a carry when r2 is greater or equal to
1351   "constant".  That doesn't work for zero.  */
1352
1353int
1354reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1355{
1356  if (GET_CODE (op) == CONST_INT)
1357    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1358
1359  return gpc_reg_operand (op, mode);
1360}
1361
1362/* Returns 1 if OP is either a constant integer valid for a DS-field or
1363   a non-special register.  If a register, it must be in the proper
1364   mode unless MODE is VOIDmode.  */
1365
1366int
1367reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1368{
1369  if (gpc_reg_operand (op, mode))
1370    return 1;
1371  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1372    return 1;
1373
1374  return 0;
1375}
1376
1377
1378/* Return 1 if the operand is either a register or an integer whose
1379   high-order 16 bits are zero.  */
1380
1381int
1382reg_or_u_short_operand (rtx op, enum machine_mode mode)
1383{
1384  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1385}
1386
1387/* Return 1 is the operand is either a non-special register or ANY
1388   constant integer.  */
1389
1390int
1391reg_or_cint_operand (rtx op, enum machine_mode mode)
1392{
1393  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1394}
1395
1396/* Return 1 is the operand is either a non-special register or ANY
1397   32-bit signed constant integer.  */
1398
int
reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
{
  /* On a 32-bit host every CONST_INT already fits in 32 bits, so the
     range check is only compiled in for wider hosts; there, biasing by
     0x80000000 maps the signed 32-bit range onto [0, 0xffffffff].  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1410
1411/* Return 1 is the operand is either a non-special register or a 32-bit
1412   signed constant integer valid for 64-bit addition.  */
1413
int
reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
{
  /* The constant must be addable with an addis/addi pair, i.e. lie in
     [-0x80008000, 0x7fff7fff].  On a 32-bit host only the upper bound
     needs checking (all host values are >= -0x80000000); on wider
     hosts the biased comparison checks both bounds at once.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && INTVAL (op) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1427
1428/* Return 1 is the operand is either a non-special register or a 32-bit
1429   signed constant integer valid for 64-bit subtraction.  */
1430
int
reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
{
  /* Same range test as reg_or_add_cint64_operand, applied to the
     negated constant, since subtraction is implemented by adding
     the negation.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1444
1445/* Return 1 is the operand is either a non-special register or ANY
1446   32-bit unsigned constant integer.  */
1447
int
reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
      /* When MODE is wider than the host word, a CONST_INT is the
	 sign-extended low part; a negative value would have nonzero
	 high bits and so cannot be a 32-bit unsigned constant.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Require that, within MODE, no bits above the low 32 are set.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only arise for DImode values wider than
	 the host word; the high word must then be all zero.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1476
1477/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1478
1479int
1480got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1481{
1482  return (GET_CODE (op) == SYMBOL_REF
1483	  || GET_CODE (op) == CONST
1484	  || GET_CODE (op) == LABEL_REF);
1485}
1486
1487/* Return 1 if the operand is a simple references that can be loaded via
1488   the GOT (labels involving addition aren't allowed).  */
1489
1490int
1491got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1492{
1493  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1494}
1495
1496/* Return the number of instructions it takes to form a constant in an
1497   integer register.  */
1498
static int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* LOW is the sign-extended low 32 bits of VALUE; HIGH holds the
	 remaining upper bits (shifting by 31 keeps the low word's sign
	 bit so the "fits in 32 signed bits" test below works).  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Any 32-bit signed value takes two instructions.  */
      if (high == 0 || high == -1)
	return 2;

      /* Drop the duplicated sign bit, leaving the true upper word.  */
      high >>= 1;

      /* Cost: build the upper word, shift it into place (+1), and
	 build/merge the low word if it is nonzero.  */
      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1532
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  Aborts on anything that is
   not a CONST_INT or CONST_DOUBLE.  */
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that does not fit in 32 signed bits but is a
	 valid mask64 constant can be formed in two instructions.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* A single-precision float is one 32-bit word.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Split OP into its two 32-bit words: directly for integer
	 CONST_DOUBLEs, via the target representation for floats.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      /* On 32-bit targets each word is loaded independently.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit target: if HIGH is just the sign extension of LOW,
	     the value is a 32-bit signed constant.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  /* Otherwise build the high word, shift (+1), and merge the
	     low word if nonzero.  */
	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1605
1606/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1607   register with one instruction per word.  We only do this if we can
1608   safely read CONST_DOUBLE_{LOW,HIGH}.  */
1609
int
easy_fp_constant (rtx op, enum machine_mode mode)
{
  /* Only CONST_DOUBLEs of a float mode (or DImode) in matching mode
     are candidates.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  /* For each float width, "easy" means every target-format word of the
     constant can be formed in a single instruction.  */
  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  /* DImode: easy when the low word is zero on 64-bit targets, or when
     the whole value takes at most two instructions.  */
  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1681
1682/* Returns the constant for the splat instrunction, if exists.  */
1683
static int
easy_vector_splat_const (int cst, enum machine_mode mode)
{
  /* Each case first tries CST at the current element width; if the two
     halves of the element are identical, it narrows CST and falls
     through to the next-smaller width.  Returns the (possibly
     narrowed) splat constant, or 0 if none works.  */
  switch (mode)
    {
    case V4SImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
	break;
      cst = cst >> 16;
      /* Fall through to try the halfword splat.  */
    case V8HImode:
      if (EASY_VECTOR_15 (cst)
	  || EASY_VECTOR_15_ADD_SELF (cst))
	return cst;
      if ((cst & 0xff) != ((cst >> 8) & 0xff))
	break;
      cst = cst >> 8;
      /* Fall through to try the byte splat.  */
    case V16QImode:
	  if (EASY_VECTOR_15 (cst)
	      || EASY_VECTOR_15_ADD_SELF (cst))
	    return cst;
    default:
      break;
    }
  return 0;
}
1712
1713
1714/* Return nonzero if all elements of a vector have the same value.  */
1715
1716static int
1717easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1718{
1719  int units, i, cst;
1720
1721  units = CONST_VECTOR_NUNITS (op);
1722
1723  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1724  for (i = 1; i < units; ++i)
1725    if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1726      break;
1727  if (i == units && easy_vector_splat_const (cst, mode))
1728    return 1;
1729  return 0;
1730}
1731
1732/* Return 1 if the operand is a CONST_INT and can be put into a
1733   register without using memory.  */
1734
int
easy_vector_constant (rtx op, enum machine_mode mode)
{
  int cst, cst2;

  /* Only CONST_VECTORs on a vector-capable target are candidates.  */
  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zero vector is always easy in a supported vector mode.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  cst  = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
       li r0, CONSTANT1
       evmergelo r0, r0, r0
       li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst  >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  /* AltiVec: a uniform vector whose splat constant fits the vspltis*
     immediate range (directly or via the add-self trick) is easy.  */
  if (TARGET_ALTIVEC
      && easy_vector_same (op, mode))
    {
      cst = easy_vector_splat_const (cst, mode);
      if (EASY_VECTOR_15_ADD_SELF (cst)
	  || EASY_VECTOR_15 (cst))
	return 1;
    }
  return 0;
}
1783
1784/* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF.  */
1785
1786int
1787easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1788{
1789  int cst;
1790  if (TARGET_ALTIVEC
1791      && GET_CODE (op) == CONST_VECTOR
1792      && easy_vector_same (op, mode))
1793    {
1794      cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1795      if (EASY_VECTOR_15_ADD_SELF (cst))
1796	return 1;
1797    }
1798  return 0;
1799}
1800
1801/* Generate easy_vector_constant out of a easy_vector_constant_add_self.  */
1802
1803rtx
1804gen_easy_vector_constant_add_self (rtx op)
1805{
1806  int i, units;
1807  rtvec v;
1808  units = GET_MODE_NUNITS (GET_MODE (op));
1809  v = rtvec_alloc (units);
1810
1811  for (i = 0; i < units; i++)
1812    RTVEC_ELT (v, i) =
1813      GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1814  return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1815}
1816
/* Output the assembler template for moving the CONST_VECTOR operands[1]
   into the vector register operands[0].  May rewrite operands[1] (and,
   for SPE, operands[2]) with the CONST_INTs used by the returned
   template.  Returns "#" when the move must be split post-reload.  */

const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  /* First two elements; for an AltiVec splat constant CST alone
     identifies the replicated value.  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (easy_vector_constant (vec, mode))
	{
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisw %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		/* Split later into splat + add-to-self.  */
		return "#";
	      cst = cst >> 16;
	      /* Fall through: retry with a halfword splat element.  */
	    case V8HImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltish %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      cst = cst >> 8;
	      /* Fall through: retry with a byte splat element.  */
	    case V16QImode:
	      if (EASY_VECTOR_15 (cst))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisb %0,%1";
		}
	      else if (EASY_VECTOR_15_ADD_SELF (cst))
		return "#";
	      /* Fall through: easy_vector_constant guaranteed a match,
		 so reaching here is a bug.  */
	    default:
	      abort ();
	    }
	}
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
      */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1892
1893/* Return 1 if the operand is the constant 0.  This works for scalars
1894   as well as vectors.  */
1895int
1896zero_constant (rtx op, enum machine_mode mode)
1897{
1898  return op == CONST0_RTX (mode);
1899}
1900
1901/* Return 1 if the operand is 0.0.  */
1902int
1903zero_fp_constant (rtx op, enum machine_mode mode)
1904{
1905  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1906}
1907
1908/* Return 1 if the operand is in volatile memory.  Note that during
1909   the RTL generation phase, memory_operand does not return TRUE for
1910   volatile memory references.  So this function allows us to
1911   recognize volatile references where its safe.  */
1912
1913int
1914volatile_mem_operand (rtx op, enum machine_mode mode)
1915{
1916  if (GET_CODE (op) != MEM)
1917    return 0;
1918
1919  if (!MEM_VOLATILE_P (op))
1920    return 0;
1921
1922  if (mode != GET_MODE (op))
1923    return 0;
1924
1925  if (reload_completed)
1926    return memory_operand (op, mode);
1927
1928  if (reload_in_progress)
1929    return strict_memory_address_p (mode, XEXP (op, 0));
1930
1931  return memory_address_p (mode, XEXP (op, 0));
1932}
1933
1934/* Return 1 if the operand is an offsettable memory operand.  */
1935
1936int
1937offsettable_mem_operand (rtx op, enum machine_mode mode)
1938{
1939  return ((GET_CODE (op) == MEM)
1940	  && offsettable_address_p (reload_completed || reload_in_progress,
1941				    mode, XEXP (op, 0)));
1942}
1943
1944/* Return 1 if the operand is either an easy FP constant (see above) or
1945   memory.  */
1946
1947int
1948mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1949{
1950  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1951}
1952
1953/* Return 1 if the operand is either a non-special register or an item
1954   that can be used as the operand of a `mode' add insn.  */
1955
1956int
1957add_operand (rtx op, enum machine_mode mode)
1958{
1959  if (GET_CODE (op) == CONST_INT)
1960    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1961	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1962
1963  return gpc_reg_operand (op, mode);
1964}
1965
1966/* Return 1 if OP is a constant but not a valid add_operand.  */
1967
1968int
1969non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1970{
1971  return (GET_CODE (op) == CONST_INT
1972	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1973	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1974}
1975
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000:
   all set bits of the immediate fit entirely in either the low or the
   high 16 bits of the mode's mask.  */

int
logical_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* With a 32-bit HOST_WIDE_INT, a wider-than-host mode whose low
	 word has the sign bit set would need high-word bits we cannot
	 inspect here; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* Any bit set in the high word rules out a 16-bit form.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept immediates expressible by ori/xori (low halfword) or
     oris/xoris (high halfword).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
2012
2013/* Return 1 if C is a constant that is not a logical operand (as
2014   above), but could be split into one.  */
2015
2016int
2017non_logical_cint_operand (rtx op, enum machine_mode mode)
2018{
2019  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2020	  && ! logical_operand (op, mode)
2021	  && reg_or_logical_cint_operand (op, mode));
2022}
2023
2024/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2025   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
2026   Reject all ones and all zeros, since these should have been optimized
2027   away and confuse the making of MB and ME.  */
2028
2029int
2030mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2031{
2032  HOST_WIDE_INT c, lsb;
2033
2034  if (GET_CODE (op) != CONST_INT)
2035    return 0;
2036
2037  c = INTVAL (op);
2038
2039  /* Fail in 64-bit mode if the mask wraps around because the upper
2040     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
2041  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2042    return 0;
2043
2044  /* We don't change the number of transitions by inverting,
2045     so make sure we start with the LS bit zero.  */
2046  if (c & 1)
2047    c = ~c;
2048
2049  /* Reject all zeros or all ones.  */
2050  if (c == 0)
2051    return 0;
2052
2053  /* Find the first transition.  */
2054  lsb = c & -c;
2055
2056  /* Invert to look for a second transition.  */
2057  c = ~c;
2058
2059  /* Erase first transition.  */
2060  c &= -lsb;
2061
2062  /* Find the second transition (if any).  */
2063  lsb = c & -c;
2064
2065  /* Match if all the bits above are 1's (or c is zero).  */
2066  return c == -lsb;
2067}
2068
/* Return 1 for the PowerPC64 rlwinm corner case: a mask whose low 32
   bits wrap around (bit 0 and bit 31 both set), which mask_operand
   deliberately rejects in 64-bit mode.  Uses the same transition-
   counting scheme as mask_operand.  */

int
mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only the wrapping shape qualifies: both ends of the 32-bit value
     must be set.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Bit 0 is known set, so inverting yields the clear LSB the
     transition search needs; reject all-ones (~c == 0).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition, erase its group, and require that no
     further group exist: all remaining bits above must be 1's.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
2094
2095/* Return 1 if the operand is a constant that is a PowerPC64 mask.
2096   It is if there are no more than one 1->0 or 0->1 transitions.
2097   Reject all zeros, since zero should have been optimized away and
2098   confuses the making of MB and ME.  */
2099
2100int
2101mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2102{
2103  if (GET_CODE (op) == CONST_INT)
2104    {
2105      HOST_WIDE_INT c, lsb;
2106
2107      c = INTVAL (op);
2108
2109      /* Reject all zeros.  */
2110      if (c == 0)
2111	return 0;
2112
2113      /* We don't change the number of transitions by inverting,
2114	 so make sure we start with the LS bit zero.  */
2115      if (c & 1)
2116	c = ~c;
2117
2118      /* Find the transition, and check that all bits above are 1's.  */
2119      lsb = c & -c;
2120
2121      /* Match if all the bits above are 1's (or c is zero).  */
2122      return c == -lsb;
2123    }
2124  return 0;
2125}
2126
2127/* Like mask64_operand, but allow up to three transitions.  This
2128   predicate is used by insn patterns that generate two rldicl or
2129   rldicr machine insns.  */
2130
2131int
2132mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2133{
2134  if (GET_CODE (op) == CONST_INT)
2135    {
2136      HOST_WIDE_INT c, lsb;
2137
2138      c = INTVAL (op);
2139
2140      /* Disallow all zeros.  */
2141      if (c == 0)
2142	return 0;
2143
2144      /* We don't change the number of transitions by inverting,
2145	 so make sure we start with the LS bit zero.  */
2146      if (c & 1)
2147	c = ~c;
2148
2149      /* Find the first transition.  */
2150      lsb = c & -c;
2151
2152      /* Invert to look for a second transition.  */
2153      c = ~c;
2154
2155      /* Erase first transition.  */
2156      c &= -lsb;
2157
2158      /* Find the second transition.  */
2159      lsb = c & -c;
2160
2161      /* Invert to look for a third transition.  */
2162      c = ~c;
2163
2164      /* Erase second transition.  */
2165      c &= -lsb;
2166
2167      /* Find the third transition (if any).  */
2168      lsb = c & -c;
2169
2170      /* Match if all the bits above are 1's (or c is zero).  */
2171      return c == -lsb;
2172    }
2173  return 0;
2174}
2175
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  OUT receives four CONST_INTs:
   out[0]/out[1] are the rotate count and mask for the first insn,
   out[2]/out[3] the rotate count and mask for the second.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* Never needed on a 32-bit host; keep the arguments referenced to
     avoid unused-parameter warnings.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
2245
2246/* Return 1 if the operand is either a non-special register or a constant
2247   that can be used as the operand of a PowerPC64 logical AND insn.  */
2248
2249int
2250and64_operand (rtx op, enum machine_mode mode)
2251{
2252  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2253    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2254
2255  return (logical_operand (op, mode) || mask64_operand (op, mode));
2256}
2257
2258/* Like the above, but also match constants that can be implemented
2259   with two rldicl or rldicr insns.  */
2260
2261int
2262and64_2_operand (rtx op, enum machine_mode mode)
2263{
2264  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2265    return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2266
2267  return logical_operand (op, mode) || mask64_2_operand (op, mode);
2268}
2269
2270/* Return 1 if the operand is either a non-special register or a
2271   constant that can be used as the operand of an RS/6000 logical AND insn.  */
2272
2273int
2274and_operand (rtx op, enum machine_mode mode)
2275{
2276  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
2277    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2278
2279  return (logical_operand (op, mode) || mask_operand (op, mode));
2280}
2281
2282/* Return 1 if the operand is a general register or memory operand.  */
2283
2284int
2285reg_or_mem_operand (rtx op, enum machine_mode mode)
2286{
2287  return (gpc_reg_operand (op, mode)
2288	  || memory_operand (op, mode)
2289	  || macho_lo_sum_memory_operand (op, mode)
2290	  || volatile_mem_operand (op, mode));
2291}
2292
2293/* Return 1 if the operand is a general register or memory operand without
2294   pre_inc or pre_dec which produces invalid form of PowerPC lwa
2295   instruction.  */
2296
2297int
2298lwa_operand (rtx op, enum machine_mode mode)
2299{
2300  rtx inner = op;
2301
2302  if (reload_completed && GET_CODE (inner) == SUBREG)
2303    inner = SUBREG_REG (inner);
2304
2305  return gpc_reg_operand (inner, mode)
2306    || (memory_operand (inner, mode)
2307	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
2308	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
2309	&& (GET_CODE (XEXP (inner, 0)) != PLUS
2310	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2311	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2312}
2313
2314/* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.  */
2315
2316int
2317symbol_ref_operand (rtx op, enum machine_mode mode)
2318{
2319  if (mode != VOIDmode && GET_MODE (op) != mode)
2320    return 0;
2321
2322  return (GET_CODE (op) == SYMBOL_REF
2323	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2324}
2325
2326/* Return 1 if the operand, used inside a MEM, is a valid first argument
2327   to CALL.  This is a SYMBOL_REF, a pseudo-register, LR or CTR.  */
2328
2329int
2330call_operand (rtx op, enum machine_mode mode)
2331{
2332  if (mode != VOIDmode && GET_MODE (op) != mode)
2333    return 0;
2334
2335  return (GET_CODE (op) == SYMBOL_REF
2336	  || (GET_CODE (op) == REG
2337	      && (REGNO (op) == LINK_REGISTER_REGNUM
2338		  || REGNO (op) == COUNT_REGISTER_REGNUM
2339		  || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2340}
2341
2342/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2343   this file.  */
2344
2345int
2346current_file_function_operand (rtx op,
2347                              enum machine_mode mode ATTRIBUTE_UNUSED)
2348{
2349  return (GET_CODE (op) == SYMBOL_REF
2350	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2351	  && (SYMBOL_REF_LOCAL_P (op)
2352	      || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2353}
2354
/* Return 1 if this operand is a valid input for a move insn.
   OP is the candidate source rtx and MODE the mode of the move; the
   checks below are ordered accept/reject tests and their order is
   significant.  */

int
input_operand (rtx op, enum machine_mode mode)
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2414
2415
/* Darwin, AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.
   TYPE is the record; COMPUTED and SPECIFIED are the candidate
   alignments in bits.  Returns the alignment to use.  */

unsigned int
rs6000_special_round_type_align (tree type, int computed, int specified)
{
  tree field = TYPE_FIELDS (type);

  /* Skip over any leading static members (VAR_DECLs on the field
     chain); only a real field can determine the record alignment.  */
  while (field != NULL && TREE_CODE (field) == VAR_DECL)
    field = TREE_CHAIN (field);

  /* If the first field is not a DFmode double, the normal rounding
     applies.  (NOTE(review): the FIELD == TYPE test looks like a
     guard against a malformed chain -- confirm when it can trigger.)  */
  if (field == NULL || field == type || DECL_MODE (field) != DFmode)
    return MAX (computed, specified);

  /* First field is a double: align the whole record to 64 bits.  */
  return MAX (MAX (computed, specified), 64);
}
2434
/* Return 1 for an operand in small memory on V.4/eabi: a SYMBOL_REF
   (possibly plus a small constant) marked as living in the small data
   area.  Always 0 on non-ELF targets.  */

int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* The symbol itself must have been placed in the small data area.  */
  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}
2478
2479/* Return true, if operand is a memory operand and has a
2480   displacement divisible by 4.  */
2481
2482int
2483word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2484{
2485  rtx addr;
2486  int off = 0;
2487
2488  if (!memory_operand (op, mode))
2489    return 0;
2490
2491  addr = XEXP (op, 0);
2492  if (GET_CODE (addr) == PLUS
2493      && GET_CODE (XEXP (addr, 0)) == REG
2494      && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2495    off = INTVAL (XEXP (addr, 1));
2496
2497  return (off % 4) == 0;
2498}
2499
2500/* Return true if either operand is a general purpose register.  */
2501
2502bool
2503gpr_or_gpr_p (rtx op0, rtx op1)
2504{
2505  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2506	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2507}
2508
2509
2510/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
2511
2512static int
2513constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2514{
2515  switch (GET_CODE(op))
2516    {
2517    case SYMBOL_REF:
2518      if (RS6000_SYMBOL_REF_TLS_P (op))
2519	return 0;
2520      else if (CONSTANT_POOL_ADDRESS_P (op))
2521	{
2522	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2523	    {
2524	      *have_sym = 1;
2525	      return 1;
2526	    }
2527	  else
2528	    return 0;
2529	}
2530      else if (! strcmp (XSTR (op, 0), toc_label_name))
2531	{
2532	  *have_toc = 1;
2533	  return 1;
2534	}
2535      else
2536	return 0;
2537    case PLUS:
2538    case MINUS:
2539      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2540	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2541    case CONST:
2542      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2543    case CONST_INT:
2544      return 1;
2545    default:
2546      return 0;
2547    }
2548}
2549
2550static bool
2551constant_pool_expr_p (rtx op)
2552{
2553  int have_sym = 0;
2554  int have_toc = 0;
2555  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2556}
2557
2558static bool
2559toc_relative_expr_p (rtx op)
2560{
2561  int have_sym = 0;
2562  int have_toc = 0;
2563  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2564}
2565
2566/* SPE offset addressing is limited to 5-bits worth of double words.  */
2567#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2568
2569bool
2570legitimate_constant_pool_address_p (rtx x)
2571{
2572  return (TARGET_TOC
2573	  && GET_CODE (x) == PLUS
2574	  && GET_CODE (XEXP (x, 0)) == REG
2575	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2576	  && constant_pool_expr_p (XEXP (x, 1)));
2577}
2578
2579static bool
2580legitimate_small_data_p (enum machine_mode mode, rtx x)
2581{
2582  return (DEFAULT_ABI == ABI_V4
2583	  && !flag_pic && !TARGET_TOC
2584	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2585	  && small_data_operand (x, mode));
2586}
2587
/* Return true if X has the reg+const_int (D-form) shape of a valid
   address for MODE.  STRICT selects strict base-register checking.  */

static bool
legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  /* EXTRA is how far beyond OFFSET the last word of a multi-word
     access reaches; it must also stay in displacement range.  */
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* Accessed as two words unless ld/std is available, in which
	 case the offset must be word-aligned instead.  */
      if (mode == DFmode || !TARGET_POWERPC64)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Four words, or two doublewords with aligned offset.  */
      if (mode == TFmode || !TARGET_POWERPC64)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so the signed 16-bit displacement test becomes a
     single unsigned comparison, for both ends of the access.  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2647
2648static bool
2649legitimate_indexed_address_p (rtx x, int strict)
2650{
2651  rtx op0, op1;
2652
2653  if (GET_CODE (x) != PLUS)
2654    return false;
2655  op0 = XEXP (x, 0);
2656  op1 = XEXP (x, 1);
2657
2658  if (!REG_P (op0) || !REG_P (op1))
2659    return false;
2660
2661  return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2662	   && INT_REG_OK_FOR_INDEX_P (op1, strict))
2663	  || (INT_REG_OK_FOR_BASE_P (op1, strict)
2664	      && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2665}
2666
2667static inline bool
2668legitimate_indirect_address_p (rtx x, int strict)
2669{
2670  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2671}
2672
2673static bool
2674macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2675{
2676    if (!TARGET_MACHO || !flag_pic
2677        || mode != SImode || GET_CODE(x) != MEM)
2678      return false;
2679    x = XEXP (x, 0);
2680
2681  if (GET_CODE (x) != LO_SUM)
2682    return false;
2683  if (GET_CODE (XEXP (x, 0)) != REG)
2684    return false;
2685  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2686    return false;
2687  x = XEXP (x, 1);
2688
2689  return CONSTANT_P (x);
2690}
2691
2692static bool
2693legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2694{
2695  if (GET_CODE (x) != LO_SUM)
2696    return false;
2697  if (GET_CODE (XEXP (x, 0)) != REG)
2698    return false;
2699  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2700    return false;
2701  x = XEXP (x, 1);
2702
2703  if (TARGET_ELF || TARGET_MACHO)
2704    {
2705      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2706	return false;
2707      if (TARGET_TOC)
2708	return false;
2709      if (GET_MODE_NUNITS (mode) != 1)
2710	return false;
2711      if (GET_MODE_BITSIZE (mode) > 32
2712	  && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2713	return false;
2714
2715      return CONSTANT_P (x);
2716    }
2717
2718  return false;
2719}
2720
2721
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  /* Thread-local symbols get their model-specific sequences.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: materialize the high part separately
     and keep a signed 16-bit low part in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; the remainder goes in HIGH_INT.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so
     the result is a reg+reg address, where the mode supports it.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
      {
        rtx op1 = XEXP (x, 0);
        rtx op2 = XEXP (x, 1);

        op1 = force_reg (Pmode, op1);

        if (GET_CODE (op2) != REG
            && (GET_CODE (op2) != CONST_INT
                || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
          op2 = force_reg (Pmode, op2);

        return gen_rtx_PLUS (Pmode, op1, op2);
      }

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: build the constant address with an
     elf_high/lo_sum pair.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Mach-O analogue of the above, using macho_high.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants with special pool entries are addressed off the TOC.  */
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2855
2856/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2857   We need to emit DTP-relative relocations.  */
2858
2859void
2860rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2861{
2862  switch (size)
2863    {
2864    case 4:
2865      fputs ("\t.long\t", file);
2866      break;
2867    case 8:
2868      fputs (DOUBLE_INT_ASM_OP, file);
2869      break;
2870    default:
2871      abort ();
2872    }
2873  output_addr_const (file, x);
2874  fputs ("@dtprel+0x8000", file);
2875}
2876
2877/* Construct the SYMBOL_REF for the tls_get_addr function.  */
2878
2879static GTY(()) rtx rs6000_tls_symbol;
2880static rtx
2881rs6000_tls_get_addr (void)
2882{
2883  if (!rs6000_tls_symbol)
2884    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2885
2886  return rs6000_tls_symbol;
2887}
2888
2889/* Construct the SYMBOL_REF for TLS GOT references.  */
2890
2891static GTY(()) rtx rs6000_got_symbol;
2892static rtx
2893rs6000_got_sym (void)
2894{
2895  if (!rs6000_got_symbol)
2896    {
2897      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2898      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2899      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2900    }
2901
2902  return rs6000_got_symbol;
2903}
2904
2905/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
2906   this (thread-local) address.  */
2907
static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  /* The computed thread-local address always lands in a fresh pseudo.  */
  dest = gen_reg_rtx (Pmode);
  /* Local-exec with 16-bit offsets: a single tprel add against the
     TLS register -- r13 in 64-bit mode, r2 in 32-bit mode.  */
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  /* Local-exec with 32-bit offsets: add the high-adjusted half of the
     offset to the TLS register first, then the low half.  */
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      /* Global-dynamic, local-dynamic, initial-exec, and 64-bit-offset
	 local-exec all need a register holding the GOT/TOC address
	 first.  */
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* Select (or construct) the GOT pointer.  64-bit uses the TOC
	 register r2; 32-bit -fpic uses the PIC offset table register;
	 otherwise materialize _GLOBAL_OFFSET_TABLE_ into a pseudo.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, 2);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC: compute the GOT address at run time by
		     taking the PC (via load_toc_v4_PIC_1b) and adding
		     the constant stored at that address.  */
		  rtx tempLR, tmp3, mem;
		  rtx first, last;

		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  /* Record GOT's symbolic value with a REG_EQUAL note,
		     and bracket the sequence with REG_LIBCALL /
		     REG_RETVAL notes so later passes treat it as one
		     indivisible unit.  */
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* Global-dynamic: build the argument in r3, call
	     __tls_get_addr, and wrap the sequence in a libcall block
	     equivalent to ADDR so it can be CSEd.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* Local-dynamic: one __tls_get_addr call yields the module
	     base (libcall block equivalent to the UNSPEC_TLSLD token),
	     then add the symbol's dtprel offset to it.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  /* Now apply the dtprel offset: one insn for 16-bit offsets,
	     an ha/lo pair for 32-bit, or a GOT load plus add for the
	     full-size case.  */
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  /* Load the tprel offset from the GOT, then add it to the
	     thread pointer.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
3085
3086/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
3087   instruction definitions.  */
3088
3089int
3090rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3091{
3092  return RS6000_SYMBOL_REF_TLS_P (x);
3093}
3094
3095/* Return 1 if X contains a thread-local symbol.  */
3096
3097bool
3098rs6000_tls_referenced_p (rtx x)
3099{
3100  if (! TARGET_HAVE_TLS)
3101    return false;
3102
3103  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3104}
3105
3106/* Return 1 if *X is a thread-local symbol.  This is the same as
3107   rs6000_tls_symbol_ref except for the type of the unused argument.  */
3108
3109static inline int
3110rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3111{
3112  return RS6000_SYMBOL_REF_TLS_P (*x);
3113}
3114
3115/* The convention appears to be to define this wherever it is used.
3116   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3117   is now used here.  */
3118#ifndef REG_MODE_OK_FOR_BASE_P
3119#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3120#endif
3121
3122/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
3123   replace the input X, or the original X if no replacement is called for.
3124   The output parameter *WIN is 1 if the calling macro should goto WIN,
3125   0 if it should not.
3126
3127   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis pair and the mem insn.
3129   This cuts number of extra insns needed from 3 to 1.
3130
3131   On Darwin, we use this to generate code for floating point constants.
3132   A movsf_low is generated so we wind up with 2 instructions rather than 3.
3133   The Darwin code is inside #if TARGET_MACHO because only then is
3134   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
	int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  /* (reg + high) + low -- reload just the inner PLUS into a base
     register and accept the rest.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Force ld/std non-word aligned offset into base register by wrapping
     in offset 0.  */
  /* On PowerPC64 the DS-form load/store offset must be a multiple of
     4; rewrite reg+off as (reg+off)+0 so reg+off gets reloaded into
     the base register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < 32
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) & 3) != 0
      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && TARGET_POWERPC64)
    {
      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      *win = 1;
      return x;
    }

  /* reg + large constant: split the constant into a sign-adjusted
     high part (reloaded into the base register) and a 16-bit low part
     left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  /* Darwin PIC / dynamic-no-pic: rewrite a bare SYMBOL_REF into a
     HIGH/LO_SUM pair so the constant is addressed in two insns.  */
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
      /* Don't do this for TFmode, since the result isn't offsettable.  */
      && mode != TFmode)
    {
      if (flag_pic)
	{
	  /* PIC addresses are formed relative to the function's
	     picbase symbol.  */
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
			   gen_rtx_SYMBOL_REF (Pmode,
			     machopic_function_base_name ())));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
	x = gen_rtx_LO_SUM (GET_MODE (x),
              gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Constants that live in the TOC are referenced through the TOC
     register; no reload needed.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* No transformation applies; tell the caller to fall through.  */
  *win = 0;
  return x;
}
3270
3271/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3272   that is a valid memory address for an instruction.
3273   The MODE argument is the machine mode for the MEM expression
3274   that wants to use this address.
3275
   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3277   refers to a constant pool entry of an address (or the sum of it
3278   plus a constant), a short (16-bit signed) constant plus a register,
3279   the sum of two registers, or a register indirect, possibly with an
3280   auto-increment.  For DFmode and DImode with a constant plus register,
3281   we must ensure that both words are addressable or PowerPC64 with offset
3282   word aligned.
3283
3284   For modes spanning multiple registers (DFmode in 32-bit GPRs,
3285   32-bit DImode, TImode), indexed addressing cannot be used because
3286   adjacent memory cells are accessed by adding word-sized offsets
3287   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* TLS symbols are never valid addresses directly; they must go
     through rs6000_legitimize_tls_address first.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  /* Plain register indirect.  */
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement of a base register, when the target has
     update forms; not available for AltiVec or SPE vector modes.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data references (ELF sdata).  */
  if (legitimate_small_data_p (mode, x))
    return 1;
  /* TOC-relative constant pool references.  */
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
	  || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit signed offset.  */
  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg), excluded for modes that would span multiple
     registers and be split with word-sized offsets at output time.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  /* HIGH/LO_SUM pairs.  */
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3326
3327/* Go to LABEL if ADDR (a legitimate address expression)
3328   has an effect that depends on the machine mode it is used for.
3329
3330   On the RS/6000 this is true of all integral offsets (since AltiVec
3331   modes don't allow them) or is a pre-increment or decrement.
3332
3333   ??? Except that due to conceptual problems in offsettable_address_p
3334   we can't really report the problems of integral offsets.  So leave
3335   this assuming that the adjustable offset must be valid for the
3336   sub-words of a TFmode operand, which is what we had before.  */
3337
3338bool
3339rs6000_mode_dependent_address (rtx addr)
3340{
3341  switch (GET_CODE (addr))
3342    {
3343    case PLUS:
3344      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3345	{
3346	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3347	  return val + 12 + 0x8000 >= 0x10000;
3348	}
3349      break;
3350
3351    case LO_SUM:
3352      return true;
3353
3354    case PRE_INC:
3355    case PRE_DEC:
3356      return TARGET_UPDATE;
3357
3358    default:
3359      break;
3360    }
3361
3362  return false;
3363}
3364
3365/* Try to output insns to set TARGET equal to the constant C if it can
3366   be done in less than N insns.  Do all computations in MODE.
3367   Returns the place where the output has been placed if it can be
3368   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */
3370
rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
		       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QImode and HImode constants always fit in a single insn.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  /* SImode: load the upper 16 bits, then IOR in the low 16 bits.  */
  else if (mode == SImode)
    {
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  /* DImode: split the value into two HOST_WIDE_INT halves (low, high)
     and let rs6000_emit_set_long_const build the sequence.  */
  else if (mode == DImode)
    {
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high half.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* The whole value fits in the low word; derive the high
	     half by sign extension.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note recording the full constant to the last
     insn, unless its source is already a constant.
     NOTE(review): single_set may return NULL if the last emitted insn
     is not a single set; this code assumes the sequences above always
     end in one -- confirm before reusing elsewhere.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3429
3430/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
3432   exponential run times encountered when looking for longer sequences
3433   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  /* Load the 64-bit constant whose low word is C1 and high word is C2
     into DEST.  On 32-bit targets, simply move each word separately.  */
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: decompose the value into four 16-bit chunks,
	 ud1 (least significant) through ud4 (most significant), and
	 emit the shortest li/lis/ori/oris/sldi sequence that covers
	 the significant chunks.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* With 64-bit HOST_WIDE_INT the whole value is in C1; recompute
	 the high word (clobbering the C2 parameter).  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: lis then ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Value is a sign-extended 48-bit constant: build ud3:ud2 with
	 lis/ori, shift left 16, then IOR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Full 64-bit value: build ud4:ud3 with lis/ori, shift left 32,
	 then IOR in ud2 (shifted) and ud1.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
3516
3517/* Emit a move from SOURCE to DEST in mode MODE.  */
3518void
3519rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3520{
3521  rtx operands[2];
3522  operands[0] = dest;
3523  operands[1] = source;
3524
3525  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
3526  if (GET_CODE (operands[1]) == CONST_DOUBLE
3527      && ! FLOAT_MODE_P (mode)
3528      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3529    {
3530      /* FIXME.  This should never happen.  */
3531      /* Since it seems that it does, do the safe thing and convert
3532	 to a CONST_INT.  */
3533      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3534    }
3535  if (GET_CODE (operands[1]) == CONST_DOUBLE
3536      && ! FLOAT_MODE_P (mode)
3537      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3538	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
3539	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
3540	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
3541    abort ();
3542
3543  /* Check if GCC is setting up a block move that will end up using FP
3544     registers as temporaries.  We must make sure this is acceptable.  */
3545  if (GET_CODE (operands[0]) == MEM
3546      && GET_CODE (operands[1]) == MEM
3547      && mode == DImode
3548      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3549	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3550      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3551					    ? 32 : MEM_ALIGN (operands[0])))
3552	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3553					       ? 32
3554					       : MEM_ALIGN (operands[1]))))
3555      && ! MEM_VOLATILE_P (operands [0])
3556      && ! MEM_VOLATILE_P (operands [1]))
3557    {
3558      emit_move_insn (adjust_address (operands[0], SImode, 0),
3559		      adjust_address (operands[1], SImode, 0));
3560      emit_move_insn (adjust_address (operands[0], SImode, 4),
3561		      adjust_address (operands[1], SImode, 4));
3562      return;
3563    }
3564
3565  if (!no_new_pseudos)
3566    {
3567      if (GET_CODE (operands[1]) == MEM && optimize > 0
3568	  && (mode == QImode || mode == HImode || mode == SImode)
3569	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3570	{
3571	  rtx reg = gen_reg_rtx (word_mode);
3572
3573	  emit_insn (gen_rtx_SET (word_mode, reg,
3574				  gen_rtx_ZERO_EXTEND (word_mode,
3575						       operands[1])));
3576	  operands[1] = gen_lowpart (mode, reg);
3577	}
3578      if (GET_CODE (operands[0]) != REG)
3579	operands[1] = force_reg (mode, operands[1]);
3580    }
3581
3582  if (mode == SFmode && ! TARGET_POWERPC
3583      && TARGET_HARD_FLOAT && TARGET_FPRS
3584      && GET_CODE (operands[0]) == MEM)
3585    {
3586      int regnum;
3587
3588      if (reload_in_progress || reload_completed)
3589	regnum = true_regnum (operands[1]);
3590      else if (GET_CODE (operands[1]) == REG)
3591	regnum = REGNO (operands[1]);
3592      else
3593	regnum = -1;
3594
3595      /* If operands[1] is a register, on POWER it may have
3596	 double-precision data in it, so truncate it to single
3597	 precision.  */
3598      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3599	{
3600	  rtx newreg;
3601	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3602	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3603	  operands[1] = newreg;
3604	}
3605    }
3606
3607  /* Recognize the case where operand[1] is a reference to thread-local
3608     data and load its address to a register.  */
3609  if (GET_CODE (operands[1]) == SYMBOL_REF)
3610    {
3611      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3612      if (model != 0)
3613	operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3614    }
3615
3616  /* Handle the case where reload calls us with an invalid address.  */
3617  if (reload_in_progress && mode == Pmode
3618      && (! general_operand (operands[1], mode)
3619	  || ! nonimmediate_operand (operands[0], mode)))
3620    goto emit_set;
3621
3622  /* Handle the case of CONSTANT_P_RTX.  */
3623  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3624    goto emit_set;
3625
3626  /* 128-bit constant floating-point values on Darwin should really be
3627     loaded as two parts.  */
3628  if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3629      && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3630      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3631    {
3632      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3633	 know how to get a DFmode SUBREG of a TFmode.  */
3634      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3635			simplify_gen_subreg (DImode, operands[1], mode, 0),
3636			DImode);
3637      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3638					     GET_MODE_SIZE (DImode)),
3639			simplify_gen_subreg (DImode, operands[1], mode,
3640					     GET_MODE_SIZE (DImode)),
3641			DImode);
3642      return;
3643    }
3644
3645  /* FIXME:  In the long term, this switch statement should go away
3646     and be replaced by a sequence of tests based on things like
3647     mode == Pmode.  */
3648  switch (mode)
3649    {
3650    case HImode:
3651    case QImode:
3652      if (CONSTANT_P (operands[1])
3653	  && GET_CODE (operands[1]) != CONST_INT)
3654	operands[1] = force_const_mem (mode, operands[1]);
3655      break;
3656
3657    case TFmode:
3658    case DFmode:
3659    case SFmode:
3660      if (CONSTANT_P (operands[1])
3661	  && ! easy_fp_constant (operands[1], mode))
3662	operands[1] = force_const_mem (mode, operands[1]);
3663      break;
3664
3665    case V16QImode:
3666    case V8HImode:
3667    case V4SFmode:
3668    case V4SImode:
3669    case V4HImode:
3670    case V2SFmode:
3671    case V2SImode:
3672    case V1DImode:
3673      if (CONSTANT_P (operands[1])
3674	  && !easy_vector_constant (operands[1], mode))
3675	operands[1] = force_const_mem (mode, operands[1]);
3676      break;
3677
3678    case SImode:
3679    case DImode:
3680      /* Use default pattern for address of ELF small data */
3681      if (TARGET_ELF
3682	  && mode == Pmode
3683	  && DEFAULT_ABI == ABI_V4
3684	  && (GET_CODE (operands[1]) == SYMBOL_REF
3685	      || GET_CODE (operands[1]) == CONST)
3686	  && small_data_operand (operands[1], mode))
3687	{
3688	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3689	  return;
3690	}
3691
3692      if (DEFAULT_ABI == ABI_V4
3693	  && mode == Pmode && mode == SImode
3694	  && flag_pic == 1 && got_operand (operands[1], mode))
3695	{
3696	  emit_insn (gen_movsi_got (operands[0], operands[1]));
3697	  return;
3698	}
3699
3700      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3701	  && TARGET_NO_TOC
3702	  && ! flag_pic
3703	  && mode == Pmode
3704	  && CONSTANT_P (operands[1])
3705	  && GET_CODE (operands[1]) != HIGH
3706	  && GET_CODE (operands[1]) != CONST_INT)
3707	{
3708	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3709
3710	  /* If this is a function address on -mcall-aixdesc,
3711	     convert it to the address of the descriptor.  */
3712	  if (DEFAULT_ABI == ABI_AIX
3713	      && GET_CODE (operands[1]) == SYMBOL_REF
3714	      && XSTR (operands[1], 0)[0] == '.')
3715	    {
3716	      const char *name = XSTR (operands[1], 0);
3717	      rtx new_ref;
3718	      while (*name == '.')
3719		name++;
3720	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3721	      CONSTANT_POOL_ADDRESS_P (new_ref)
3722		= CONSTANT_POOL_ADDRESS_P (operands[1]);
3723	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3724	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3725	      SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3726	      operands[1] = new_ref;
3727	    }
3728
3729	  if (DEFAULT_ABI == ABI_DARWIN)
3730	    {
3731#if TARGET_MACHO
3732	      if (MACHO_DYNAMIC_NO_PIC_P)
3733		{
3734		  /* Take care of any required data indirection.  */
3735		  operands[1] = rs6000_machopic_legitimize_pic_address (
3736				  operands[1], mode, operands[0]);
3737		  if (operands[0] != operands[1])
3738		    emit_insn (gen_rtx_SET (VOIDmode,
3739				            operands[0], operands[1]));
3740		  return;
3741		}
3742#endif
3743	      emit_insn (gen_macho_high (target, operands[1]));
3744	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
3745	      return;
3746	    }
3747
3748	  emit_insn (gen_elf_high (target, operands[1]));
3749	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
3750	  return;
3751	}
3752
3753      /* If this is a SYMBOL_REF that refers to a constant pool entry,
3754	 and we have put it in the TOC, we just need to make a TOC-relative
3755	 reference to it.  */
3756      if (TARGET_TOC
3757	  && GET_CODE (operands[1]) == SYMBOL_REF
3758	  && constant_pool_expr_p (operands[1])
3759	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3760					      get_pool_mode (operands[1])))
3761	{
3762	  operands[1] = create_TOC_reference (operands[1]);
3763	}
3764      else if (mode == Pmode
3765	       && CONSTANT_P (operands[1])
3766	       && ((GET_CODE (operands[1]) != CONST_INT
3767		    && ! easy_fp_constant (operands[1], mode))
3768		   || (GET_CODE (operands[1]) == CONST_INT
3769		       && num_insns_constant (operands[1], mode) > 2)
3770		   || (GET_CODE (operands[0]) == REG
3771		       && FP_REGNO_P (REGNO (operands[0]))))
3772	       && GET_CODE (operands[1]) != HIGH
3773	       && ! legitimate_constant_pool_address_p (operands[1])
3774	       && ! toc_relative_expr_p (operands[1]))
3775	{
3776	  /* Emit a USE operation so that the constant isn't deleted if
3777	     expensive optimizations are turned on because nobody
3778	     references it.  This should only be done for operands that
3779	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3780	     This should not be done for operands that contain LABEL_REFs.
3781	     For now, we just handle the obvious case.  */
3782	  if (GET_CODE (operands[1]) != LABEL_REF)
3783	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3784
3785#if TARGET_MACHO
3786	  /* Darwin uses a special PIC legitimizer.  */
3787	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3788	    {
3789	      operands[1] =
3790		rs6000_machopic_legitimize_pic_address (operands[1], mode,
3791							operands[0]);
3792	      if (operands[0] != operands[1])
3793		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3794	      return;
3795	    }
3796#endif
3797
3798	  /* If we are to limit the number of things we put in the TOC and
3799	     this is a symbol plus a constant we can add in one insn,
3800	     just put the symbol in the TOC and add the constant.  Don't do
3801	     this if reload is in progress.  */
3802	  if (GET_CODE (operands[1]) == CONST
3803	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3804	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
3805	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3806	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3807		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3808	      && ! side_effects_p (operands[0]))
3809	    {
3810	      rtx sym =
3811		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3812	      rtx other = XEXP (XEXP (operands[1], 0), 1);
3813
3814	      sym = force_reg (mode, sym);
3815	      if (mode == SImode)
3816		emit_insn (gen_addsi3 (operands[0], sym, other));
3817	      else
3818		emit_insn (gen_adddi3 (operands[0], sym, other));
3819	      return;
3820	    }
3821
3822	  operands[1] = force_const_mem (mode, operands[1]);
3823
3824	  if (TARGET_TOC
3825	      && constant_pool_expr_p (XEXP (operands[1], 0))
3826	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3827			get_pool_constant (XEXP (operands[1], 0)),
3828			get_pool_mode (XEXP (operands[1], 0))))
3829	    {
3830	      operands[1]
3831		= gen_rtx_MEM (mode,
3832			       create_TOC_reference (XEXP (operands[1], 0)));
3833	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
3834	      RTX_UNCHANGING_P (operands[1]) = 1;
3835	    }
3836	}
3837      break;
3838
3839    case TImode:
3840      if (GET_CODE (operands[0]) == MEM
3841	  && GET_CODE (XEXP (operands[0], 0)) != REG
3842	  && ! reload_in_progress)
3843	operands[0]
3844	  = replace_equiv_address (operands[0],
3845				   copy_addr_to_reg (XEXP (operands[0], 0)));
3846
3847      if (GET_CODE (operands[1]) == MEM
3848	  && GET_CODE (XEXP (operands[1], 0)) != REG
3849	  && ! reload_in_progress)
3850	operands[1]
3851	  = replace_equiv_address (operands[1],
3852				   copy_addr_to_reg (XEXP (operands[1], 0)));
3853      if (TARGET_POWER)
3854	{
3855	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
3856		       gen_rtvec (2,
3857				  gen_rtx_SET (VOIDmode,
3858					       operands[0], operands[1]),
3859				  gen_rtx_CLOBBER (VOIDmode,
3860						   gen_rtx_SCRATCH (SImode)))));
3861	  return;
3862	}
3863      break;
3864
3865    default:
3866      abort ();
3867    }
3868
3869  /* Above, we may have called force_const_mem which may have returned
3870     an invalid address.  If we can, fix this up; otherwise, reload will
3871     have to deal with it.  */
3872  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3873    operands[1] = validize_mem (operands[1]);
3874
3875 emit_set:
3876  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3877}
3878
/* Nonzero if we can use a floating-point register to pass this arg.
   True when the mode is a scalar float, a free FP argument register
   remains, and hard float with FPRs is enabled.  TYPE is unused but
   kept so the interface parallels USE_ALTIVEC_FOR_ARG_P.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.
   True when the mode is an AltiVec vector mode, a free vector argument
   register remains, the AltiVec ABI is in force, and the argument is
   named (unnamed/ellipsis vector args never go in vector regs).  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
3891
3892/* Return a nonzero value to say to return the function value in
3893   memory, just as large structures are always returned.  TYPE will be
3894   the data type of the value, and FNTYPE will be the type of the
3895   function doing the returning, or @code{NULL} for libcalls.
3896
3897   The AIX ABI for the RS/6000 specifies that all structures are
3898   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
3899   specifies that structures <= 8 bytes are returned in r3/r4, but a
3900   draft put them in memory, and GCC used to implement the draft
3901   instead of the final standard.  Therefore, TARGET_AIX_STRUCT_RET
3902   controls this instead of DEFAULT_ABI; V.4 targets needing backward
3903   compatibility can change DRAFT_V4_STRUCT_RET to override the
3904   default, and -m switches get the final word.  See
3905   rs6000_override_options for more details.
3906
3907   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3908   long double support is enabled.  These values are returned in memory.
3909
3910   int_size_in_bytes returns -1 for variable size objects, which go in
3911   memory always.  The cast to unsigned makes -1 > 8.  */
3912
3913static bool
3914rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3915{
3916  if (AGGREGATE_TYPE_P (type)
3917      && (TARGET_AIX_STRUCT_RET
3918	  || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3919    return true;
3920  if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3921    return true;
3922  return false;
3923}
3924
3925/* Initialize a variable CUM of type CUMULATIVE_ARGS
3926   for a call to a function whose data type is FNTYPE.
3927   For a library call, FNTYPE is 0.
3928
3929   For incoming args we set the number of arguments in the prototype large
3930   so we never return a PARALLEL.  */
3931
3932void
3933init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3934		      rtx libname ATTRIBUTE_UNUSED, int incoming,
3935		      int libcall, int n_named_args)
3936{
3937  static CUMULATIVE_ARGS zero_cumulative;
3938
3939  *cum = zero_cumulative;
3940  cum->words = 0;
3941  cum->fregno = FP_ARG_MIN_REG;
3942  cum->vregno = ALTIVEC_ARG_MIN_REG;
3943  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3944  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3945		      ? CALL_LIBCALL : CALL_NORMAL);
3946  cum->sysv_gregno = GP_ARG_MIN_REG;
3947  cum->stdarg = fntype
3948    && (TYPE_ARG_TYPES (fntype) != 0
3949	&& (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
3950	    != void_type_node));
3951
3952  cum->nargs_prototype = 0;
3953  if (incoming || cum->prototype)
3954    cum->nargs_prototype = n_named_args;
3955
3956  /* Check for a longcall attribute.  */
3957  if ((!fntype && rs6000_default_long_calls)
3958      || (fntype
3959	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3960	  && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
3961    cum->call_cookie |= CALL_LONG;
3962
3963  if (TARGET_DEBUG_ARG)
3964    {
3965      fprintf (stderr, "\ninit_cumulative_args:");
3966      if (fntype)
3967	{
3968	  tree ret_type = TREE_TYPE (fntype);
3969	  fprintf (stderr, " ret code = %s,",
3970		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
3971	}
3972
3973      if (cum->call_cookie & CALL_LONG)
3974	fprintf (stderr, " longcall,");
3975
3976      fprintf (stderr, " proto = %d, nargs = %d\n",
3977	       cum->prototype, cum->nargs_prototype);
3978    }
3979
3980    if (fntype
3981	&& !TARGET_ALTIVEC
3982	&& TARGET_ALTIVEC_ABI
3983        && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3984      {
3985	error ("Cannot return value in vector register because"
3986	       " altivec instructions are disabled, use -maltivec"
3987	       " to enable them.");
3988      }
3989}
3990
/* If defined, a C expression which determines whether, and in which
   direction, to pad out an argument with extra space.  The value
   should be of type `enum direction': either `upward' to pad above
   the argument, `downward' to pad below, or `none' to inhibit
   padding.

   MODE is the argument's machine mode and TYPE its tree type (may be
   NULL for libcalls).

   For the AIX ABI structs are always stored left shifted in their
   argument slot.  */

enum direction
function_arg_padding (enum machine_mode mode, tree type)
{
/* Supply safe defaults when the target headers do not define these
   compatibility knobs.  */
#ifndef AGGREGATE_PADDING_FIXED
#define AGGREGATE_PADDING_FIXED 0
#endif
#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
#define AGGREGATES_PAD_UPWARD_ALWAYS 0
#endif

  if (!AGGREGATE_PADDING_FIXED)
    {
      /* GCC used to pass structures of the same size as integer types as
	 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
	 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
	 passed padded downward, except that -mstrict-align further
	 muddied the water in that multi-component structures of 2 and 4
	 bytes in size were passed padded upward.

	 The following arranges for best compatibility with previous
	 versions of gcc, but removes the -mstrict-align dependency.  */
      if (BYTES_BIG_ENDIAN)
	{
	  HOST_WIDE_INT size = 0;

	  if (mode == BLKmode)
	    {
	      /* Variable-size types leave size at 0 and so pad upward.  */
	      if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
		size = int_size_in_bytes (type);
	    }
	  else
	    size = GET_MODE_SIZE (mode);

	  /* Integer-sized args were historically padded downward.  */
	  if (size == 1 || size == 2 || size == 4)
	    return downward;
	}
      return upward;
    }

  if (AGGREGATES_PAD_UPWARD_ALWAYS)
    {
      if (type != 0 && AGGREGATE_TYPE_P (type))
	return upward;
    }

  /* Fall back to the default.  */
  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
}
4048
4049/* If defined, a C expression that gives the alignment boundary, in bits,
4050   of an argument with the specified mode and type.  If it is not defined,
4051   PARM_BOUNDARY is used for all arguments.
4052
4053   V.4 wants long longs to be double word aligned.  */
4054
4055int
4056function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4057{
4058  if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4059    return 64;
4060  else if (SPE_VECTOR_MODE (mode))
4061    return 64;
4062  else if (ALTIVEC_VECTOR_MODE (mode))
4063    return 128;
4064  else
4065    return PARM_BOUNDARY;
4066}
4067
4068/* Compute the size (in words) of a function argument.  */
4069
4070static unsigned long
4071rs6000_arg_size (enum machine_mode mode, tree type)
4072{
4073  unsigned long size;
4074
4075  if (mode != BLKmode)
4076    size = GET_MODE_SIZE (mode);
4077  else
4078    size = int_size_in_bytes (type);
4079
4080  if (TARGET_32BIT)
4081    return (size + 3) >> 2;
4082  else
4083    return (size + 7) >> 3;
4084}
4085
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)
   NAMED is nonzero for a named parameter; here it only affects which
   register class AltiVec args consume and the debug output.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		      tree type, int named)
{
  /* One fewer prototype slot remains; this may go negative, which
     function_arg uses to detect args past the prototyped ones.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      bool stack = false;

      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
        {
	  cum->vregno++;
	  if (!TARGET_ALTIVEC)
	    error ("Cannot pass argument in vector register because"
		   " altivec instructions are disabled, use -maltivec"
		   " to enable them.");

	  /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
	     even if it is going to be passed in a vector register.
	     Darwin does the same for variable-argument functions.  */
	  if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
	      || (cum->stdarg && DEFAULT_ABI != ABI_V4))
	    stack = true;
	}
      else
	stack = true;

      /* STACK means gpr/stack words are consumed in addition to (or
	 instead of) the vector register.  */
      if (stack)
        {
	  int align;

	  /* Vector parameters must be 16-byte aligned.  This places
	     them at 2 mod 4 in terms of words in 32-bit mode, since
	     the parameter save area starts at offset 24 from the
	     stack.  In 64-bit mode, they just have to start on an
	     even word, since the parameter save area is 16-byte
	     aligned.  Space for GPRs is reserved even if the argument
	     will be passed in memory.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  cum->words += align + rs6000_arg_size (mode, type);

	  if (TARGET_DEBUG_ARG)
	    {
	      fprintf (stderr, "function_adv: words = %2d, align=%d, ",
		       cum->words, align);
	      fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
		       cum->nargs_prototype, cum->prototype,
		       GET_MODE_NAME (mode));
	    }
	}
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && !cum->stdarg
	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector consumes a single gpr slot; see
       rs6000_spe_function_arg.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* Out of fprs: the value goes on the stack.  DFmode is
		 doubleword aligned there.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += rs6000_arg_size (mode, type);
	    }
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long and SPE vectors are aligned on the stack.
		 So are other 2 word items such as complex int due to
		 a historical mistake.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* Remaining (non-V.4) ABIs.  */
      int n_words = rs6000_arg_size (mode, type);
      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;

      /* The simple alignment calculation here works because
	 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
	 If we ever want to handle alignments larger than 8 bytes for
	 32-bit or 16 bytes for 64-bit, then we'll need to take into
	 account the offset to the start of the parm save area.  */
      align &= cum->words;
      cum->words += align + n_words;

      /* Floating point args consume fprs as well as gpr/stack words;
	 one fpr per 8 bytes.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
4232
4233/* Determine where to put a SIMD argument on the SPE.  */
4234
4235static rtx
4236rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4237			 tree type)
4238{
4239  if (cum->stdarg)
4240    {
4241      int gregno = cum->sysv_gregno;
4242      int n_words = rs6000_arg_size (mode, type);
4243
4244      /* SPE vectors are put in odd registers.  */
4245      if (n_words == 2 && (gregno & 1) == 0)
4246	gregno += 1;
4247
4248      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4249	{
4250	  rtx r1, r2;
4251	  enum machine_mode m = SImode;
4252
4253	  r1 = gen_rtx_REG (m, gregno);
4254	  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4255	  r2 = gen_rtx_REG (m, gregno + 1);
4256	  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4257	  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4258	}
4259      else
4260	return NULL_RTX;
4261    }
4262  else
4263    {
4264      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4265	return gen_rtx_REG (mode, cum->sysv_gregno);
4266      else
4267	return NULL_RTX;
4268    }
4269}
4270
/* Determine where to place an argument in 64-bit mode with 32-bit ABI.
   ALIGN_WORDS is the index (in 4-byte words, already aligned) of the
   first gpr slot the argument would occupy.  Returns NULL_RTX if the
   arg is entirely in memory, a single REG when it fits in one gpr, or
   a PARALLEL of SImode pieces otherwise.  */

static rtx
rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
{
  int n_units;
  int i, k;
  rtx rvec[GP_ARG_NUM_REG + 1];

  /* Past the last gpr slot: the whole arg goes to memory.  */
  if (align_words >= GP_ARG_NUM_REG)
    return NULL_RTX;

  n_units = rs6000_arg_size (mode, type);

  /* Optimize the simple case where the arg fits in one gpr, except in
     the case of BLKmode due to assign_parms assuming that registers are
     BITS_PER_WORD wide.  */
  if (n_units == 0
      || (n_units == 1 && mode != BLKmode))
    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);

  k = 0;
  if (align_words + n_units > GP_ARG_NUM_REG)
    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
       using a magic NULL_RTX component.
       FIXME: This is not strictly correct.  Only some of the arg
       belongs in memory, not all of it.  However, there isn't any way
       to do this currently, apart from building rtx descriptions for
       the pieces of memory we want stored.  Due to bugs in the generic
       code we can't use the normal function_arg_partial_nregs scheme
       with the PARALLEL arg description we emit here.
       In any case, the code to store the whole arg to memory is often
       more efficient than code to store pieces, and we know that space
       is available in the right place for the whole arg.  */
    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);

  /* Describe each 4-byte word that lands in a gpr, stopping at the
     last gpr or the end of the arg, whichever comes first.  */
  i = 0;
  do
    {
      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
      rtx off = GEN_INT (i++ * 4);
      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
    }
  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);

  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
}
4318
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when CALL_LIBCALL is set) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
	      tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->stdarg
	      || (cum->nargs_prototype < 0
		  && (cum->prototype || TARGET_NO_PROTOTYPE))))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
       /* Vector parameters get passed in vector register
          and also in GPRs or memory, in absence of prototype.  */
       int align_words;
       rtx slot;
       /* Vector args start on an even word boundary.  */
       align_words = (cum->words + 1) & ~1;

       if (align_words >= GP_ARG_NUM_REG)
         {
           slot = NULL_RTX;
         }
       else
         {
           slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
         }
       /* First element: the gpr (or memory, via NULL_RTX) copy;
	  second element: the vector register copy.  */
       return gen_rtx_PARALLEL (mode,
                gen_rtvec (2,
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              slot, const0_rtx),
                           gen_rtx_EXPR_LIST (VOIDmode,
                                              gen_rtx_REG (mode, cum->vregno),
                                              const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Unnamed vector args under V.4, and named vector args that did
	 not get a vector register above, go on the stack.  */
      if (named || abi == ABI_V4)
	return NULL_RTX;
      else
	{
	  /* Vector parameters to varargs functions under AIX or Darwin
	     get passed in memory and possibly also in GPRs.  */
	  int align, align_words, n_words;
	  enum machine_mode part_mode;

	  /* Vector parameters must be 16-byte aligned.  This places them at
	     2 mod 4 in terms of words in 32-bit mode, since the parameter
	     save area starts at offset 24 from the stack.  In 64-bit mode,
	     they just have to start on an even word, since the parameter
	     save area is 16-byte aligned.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  align_words = cum->words + align;

	  /* Out of registers?  Memory, then.  */
	  if (align_words >= GP_ARG_NUM_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  /* The vector value goes in GPRs.  Only the part of the
	     value in GPRs is reported here.  */
	  part_mode = mode;
	  n_words = rs6000_arg_size (mode, type);
	  if (align_words + n_words > GP_ARG_NUM_REG)
	    /* Fortunately, there are only two possibilities, the value
	       is either wholly in GPRs or half in GPRs and half not.  */
	    part_mode = DImode;

	  return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
	}
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return rs6000_spe_function_arg (cum, mode, type);
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL_RTX;
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type,
					      gregno - GP_ARG_MIN_REG);
	  return gen_rtx_REG (mode, gregno);
	}
    }
  else
    {
      /* Remaining (non-V.4) ABIs.  */
      int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
      int align_words = cum->words + (cum->words & align);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
	{
	  rtx rvec[GP_ARG_NUM_REG + 1];
	  rtx r;
	  int k;
	  bool needs_psave;
	  enum machine_mode fmode = mode;
	  unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

	  if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
	    {
	      /* Currently, we only ever need one reg here because complex
		 doubles are split.  */
	      if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
		abort ();

	      /* Long double split over regs and memory.  */
	      fmode = DFmode;
	    }

	  /* Do we also need to pass this arg in the parameter save
	     area?  */
	  needs_psave = (type
			 && (cum->nargs_prototype <= 0
			     || (DEFAULT_ABI == ABI_AIX
				 && TARGET_XL_COMPAT
				 && align_words >= GP_ARG_NUM_REG)));

	  /* Common case: a single fpr suffices.  */
	  if (!needs_psave && mode == fmode)
	    return gen_rtx_REG (fmode, cum->fregno);

	  k = 0;
	  if (needs_psave)
	    {
	      /* Describe the part that goes in gprs or the stack.
		 This piece must come first, before the fprs.  */
	      if (align_words < GP_ARG_NUM_REG)
		{
		  unsigned long n_words = rs6000_arg_size (mode, type);

		  if (align_words + n_words > GP_ARG_NUM_REG
		      || (TARGET_32BIT && TARGET_POWERPC64))
		    {
		      /* If this is partially on the stack, then we only
			 include the portion actually in registers here.  */
		      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
		      rtx off;
		      do
			{
			  r = gen_rtx_REG (rmode,
					   GP_ARG_MIN_REG + align_words);
			  off = GEN_INT (k * GET_MODE_SIZE (rmode));
			  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
			}
		      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
		    }
		  else
		    {
		      /* The whole arg fits in gprs.  */
		      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
		      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
		    }
		}
	      else
		/* It's entirely in memory.  */
		rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
	    }

	  /* Describe where this piece goes in the fprs.  */
	  r = gen_rtx_REG (fmode, cum->fregno);
	  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

	  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
	}
      else if (align_words < GP_ARG_NUM_REG)
	{
	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
	}
      else
	return NULL_RTX;
    }
}
4568
/* For an arg passed partly in registers and partly in memory, this is
   the number of registers used.  For args passed entirely in registers
   or entirely in memory, zero.  When an arg is described by a PARALLEL,
   perhaps using more than one register type, this function returns the
   number of registers used by the first element of the PARALLEL.  */

int
function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			    tree type, int named)
{
  int ret = 0;
  int align;
  int parm_offset;
  int align_words;

  /* V.4 never splits an arg between registers and the stack.  */
  if (DEFAULT_ABI == ABI_V4)
    return 0;

  /* A prototyped AltiVec arg lives wholly in a vector register.  */
  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  /* Compute the first gpr-word slot this arg would occupy, taking its
     alignment into account.  PARM_OFFSET is 2 words in 32-bit mode
     because the parameter save area starts at offset 24 from the
     stack, so 16-byte alignment falls at 2 mod 4 words there.  */
  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
  parm_offset = TARGET_32BIT ? 2 : 0;
  align_words = cum->words + ((parm_offset - cum->words) & align);

  if (USE_FP_FOR_ARG_P (cum, mode, type)
      /* If we are passing this arg in gprs as well, then this function
	 should return the number of gprs (or memory) partially passed,
	 *not* the number of fprs.  */
      && !(type
	   && (cum->nargs_prototype <= 0
	       || (DEFAULT_ABI == ABI_AIX
		   && TARGET_XL_COMPAT
		   && align_words >= GP_ARG_NUM_REG))))
    {
      /* Count fprs used when the value runs past the last fp arg reg.  */
      if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
	ret = FP_ARG_MAX_REG + 1 - cum->fregno;
      else if (cum->nargs_prototype >= 0)
	return 0;
    }

  /* Otherwise count the gpr slots before the arg overflows to memory.  */
  if (align_words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
    ret = GP_ARG_NUM_REG - align_words;

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);

  return ret;
}
4620
4621/* A C expression that indicates when an argument must be passed by
4622   reference.  If nonzero for an argument, a copy of that argument is
4623   made in memory and a pointer to the argument is passed instead of
4624   the argument itself.  The pointer is passed in whatever way is
4625   appropriate for passing a pointer to that type.
4626
4627   Under V.4, aggregates and long double are passed by reference.
4628
4629   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
4630   reference unless the AltiVec vector extension ABI is in force.
4631
4632   As an extension to all ABIs, variable sized types are passed by
4633   reference.  */
4634
4635int
4636function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4637				enum machine_mode mode ATTRIBUTE_UNUSED,
4638				tree type, int named ATTRIBUTE_UNUSED)
4639{
4640  if ((DEFAULT_ABI == ABI_V4
4641       && ((type && AGGREGATE_TYPE_P (type))
4642	   || mode == TFmode))
4643      || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4644      || (type && int_size_in_bytes (type) < 0))
4645    {
4646      if (TARGET_DEBUG_ARG)
4647	fprintf (stderr, "function_arg_pass_by_reference\n");
4648
4649      return 1;
4650    }
4651  return 0;
4652}
4653
4654static void
4655rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4656{
4657  int i;
4658  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4659
4660  if (nregs == 0)
4661    return;
4662
4663    for (i = 0; i < nregs; i++)
4664    {
4665      rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4666      if (reload_completed)
4667      {
4668	if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4669	  tem = NULL_RTX;
4670	else
4671	  tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4672				     i * GET_MODE_SIZE(reg_mode));
4673      }
4674      else
4675	tem = replace_equiv_address (tem, XEXP (tem, 0));
4676
4677      if (tem == NULL_RTX)
4678        abort ();
4679
4680      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4681    }
4682}
4683
4684
4685/* Perform any needed actions needed for a function that is receiving a
4686   variable number of arguments.
4687
4688   CUM is as above.
4689
4690   MODE and TYPE are the mode and type of the current parameter.
4691
4692   PRETEND_SIZE is a variable that should be set to the amount of stack
4693   that must be pushed by the prolog to pretend that our caller pushed
4694   it.
4695
4696   Normally, this macro will push all remaining incoming registers on the
4697   stack and set PRETEND_SIZE to the length of the registers pushed.  */
4698
4699static void
4700setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4701		tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4702{
4703  CUMULATIVE_ARGS next_cum;
4704  int reg_size = TARGET_32BIT ? 4 : 8;
4705  rtx save_area = NULL_RTX, mem;
4706  int first_reg_offset, set;
4707
4708  /* Skip the last named argument.  */
4709  next_cum = *cum;
4710  function_arg_advance (&next_cum, mode, type, 1);
4711
4712  if (DEFAULT_ABI == ABI_V4)
4713    {
4714      /* Indicate to allocate space on the stack for varargs save area.  */
4715      cfun->machine->sysv_varargs_p = 1;
4716      if (! no_rtl)
4717	save_area = plus_constant (virtual_stack_vars_rtx,
4718				   - RS6000_VARARGS_SIZE);
4719
4720      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4721    }
4722  else
4723    {
4724      first_reg_offset = next_cum.words;
4725      save_area = virtual_incoming_args_rtx;
4726      cfun->machine->sysv_varargs_p = 0;
4727
4728      if (MUST_PASS_IN_STACK (mode, type))
4729	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4730    }
4731
4732  set = get_varargs_alias_set ();
4733  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4734    {
4735      mem = gen_rtx_MEM (BLKmode,
4736		         plus_constant (save_area,
4737					first_reg_offset * reg_size)),
4738      set_mem_alias_set (mem, set);
4739      set_mem_align (mem, BITS_PER_WORD);
4740
4741      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4742			          GP_ARG_NUM_REG - first_reg_offset);
4743    }
4744
4745  /* Save FP registers if needed.  */
4746  if (DEFAULT_ABI == ABI_V4
4747      && TARGET_HARD_FLOAT && TARGET_FPRS
4748      && ! no_rtl
4749      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4750    {
4751      int fregno = next_cum.fregno;
4752      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4753      rtx lab = gen_label_rtx ();
4754      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4755
4756      emit_jump_insn (gen_rtx_SET (VOIDmode,
4757				   pc_rtx,
4758				   gen_rtx_IF_THEN_ELSE (VOIDmode,
4759					    gen_rtx_NE (VOIDmode, cr1,
4760						        const0_rtx),
4761					    gen_rtx_LABEL_REF (VOIDmode, lab),
4762					    pc_rtx)));
4763
4764      while (fregno <= FP_ARG_V4_MAX_REG)
4765	{
4766	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4767          set_mem_alias_set (mem, set);
4768	  set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
4769	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4770	  fregno++;
4771	  off += 8;
4772	}
4773
4774      emit_label (lab);
4775    }
4776}
4777
4778/* Create the va_list data type.  */
4779
4780static tree
4781rs6000_build_builtin_va_list (void)
4782{
4783  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4784
4785  /* For AIX, prefer 'char *' because that's what the system
4786     header files like.  */
4787  if (DEFAULT_ABI != ABI_V4)
4788    return build_pointer_type (char_type_node);
4789
4790  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4791  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4792
4793  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4794		      unsigned_char_type_node);
4795  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4796		      unsigned_char_type_node);
4797  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4798     every user file.  */
4799  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4800		      short_unsigned_type_node);
4801  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4802		      ptr_type_node);
4803  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4804		      ptr_type_node);
4805
4806  DECL_FIELD_CONTEXT (f_gpr) = record;
4807  DECL_FIELD_CONTEXT (f_fpr) = record;
4808  DECL_FIELD_CONTEXT (f_res) = record;
4809  DECL_FIELD_CONTEXT (f_ovf) = record;
4810  DECL_FIELD_CONTEXT (f_sav) = record;
4811
4812  TREE_CHAIN (record) = type_decl;
4813  TYPE_NAME (record) = type_decl;
4814  TYPE_FIELDS (record) = f_gpr;
4815  TREE_CHAIN (f_gpr) = f_fpr;
4816  TREE_CHAIN (f_fpr) = f_res;
4817  TREE_CHAIN (f_res) = f_ovf;
4818  TREE_CHAIN (f_ovf) = f_sav;
4819
4820  layout_type (record);
4821
4822  /* The correct type is an array type of one element.  */
4823  return build_array_type (record, build_index_type (size_zero_node));
4824}
4825
4826/* Implement va_start.  */
4827
void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Walk the field chain of the __va_list_tag record built in
     rs6000_build_builtin_va_list: gpr, fpr, reserved,
     overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is an array of one record; dereference it and build
     COMPONENT_REFs for each field we need to initialize.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Initialize the gpr and fpr counters from the registers already
     consumed by the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  It sits RS6000_VARARGS_SIZE below
     the virtual stack variables, matching the area reserved by
     setup_incoming_varargs.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
4889
4890/* Implement va_arg.  */
4891
rtx
rs6000_va_arg (tree valist, tree type)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;
  int align;

  /* For non-SVR4 ABIs only a few cases need special handling; the
     rest is handled by the generic std_expand_builtin_va_arg.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference, as are AltiVec
	 vectors when 32-bit and not using the AltiVec ABI extension.  */
      if (int_size_in_bytes (type) < 0
	  || (TARGET_32BIT
	      && !TARGET_ALTIVEC_ABI
	      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      /* When complex arguments are split, fetch the two halves with
	 recursive calls and repack them contiguously.  */
      if (targetm.calls.split_complex_arg
	  && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      rtx real_part, imag_part, dest_real, rr;

	      real_part = rs6000_va_arg (valist, elem_type);
	      imag_part = rs6000_va_arg (valist, elem_type);

	      /* We're not returning the value here, but the address.
		 real_part and imag_part are not contiguous, and we know
		 there is space available to pack real_part next to
		 imag_part.  float _Complex is not promoted to
		 double _Complex by the default promotion rules that
		 promote float to double.  */
	      if (2 * elem_size > UNITS_PER_WORD)
		abort ();

	      real_part = gen_rtx_MEM (elem_mode, real_part);
	      imag_part = gen_rtx_MEM (elem_mode, imag_part);

	      /* Copy the real part into the word just below the
		 imaginary part, so the pair forms one complex value.  */
	      dest_real = adjust_address (imag_part, elem_mode, -elem_size);
	      rr = gen_reg_rtx (elem_mode);
	      emit_move_insn (rr, real_part);
	      emit_move_insn (dest_real, rr);

	      return XEXP (dest_real, 0);
	    }
	}

      return std_expand_builtin_va_arg (valist, type);
    }

  /* SVR4 path.  Walk the __va_list_tag field chain built in
     rs6000_build_builtin_va_list: gpr, fpr, reserved,
     overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size in bytes and in 4-byte words (the V.4 GPRs are 32-bit).  */
  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;
  align = 1;

  /* Classify the argument: decide which register counter to bump,
     where its save-area slots live (sav_ofs/sav_scale), and whether
     what we fetch is a pointer to the real value (indirect_p).  */
  if (AGGREGATE_TYPE_P (type)
      || TYPE_MODE (type) == TFmode
      || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
    {
      /* Aggregates, long doubles, and AltiVec vectors are passed by
	 reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = 4;
      rsize = 1;
    }
  else if (TARGET_HARD_FLOAT && TARGET_FPRS
	   && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
      if (TYPE_MODE (type) == DFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL_RTX;
  addr_rtx = gen_reg_rtx (Pmode);

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2)
	{
	  /* Round the register counter up to an even register by
	     post-incrementing it with its own low bit.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	}

      /* If fewer than n_reg registers remain (8 per class), jump to
	 the overflow-area path at lab_false.  */
      emit_cmp_and_jump_insns
	(expand_expr (u, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* addr = sav + sav_ofs + reg++ * sav_scale.  */
      t = sav;
      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();

      emit_label (lab_false);
      if (n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of the n_reg == 2 case.  */
	  t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
	  TREE_SIDE_EFFECTS (t) = 1;
	  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (lab_over)
    emit_label (lab_over);

  /* For by-reference arguments the slot holds a pointer; load through
     it so addr_rtx points at the actual value.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
5114
5115/* Builtins.  */
5116
/* Register the target builtin NAME with prototype TYPE and function
   code CODE, but only when the MASK bits (e.g. MASK_ALTIVEC) are set
   in target_flags; otherwise the builtin is not created at all.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
5123
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).
   Each entry lists the target_flags mask gating the builtin, the insn
   code that implements it, the user-visible builtin name, and the
   builtin function code; see def_builtin above.  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
5152
/* DST operations: void foo (void *, const int, const char).
   AltiVec data-stream touch builtins; same entry layout as
   bdesc_3arg (mask, insn code, name, builtin code).  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
5162
5163/* Simple binary operations: VECc = foo (VECa, VECb).  */
5164
5165static struct builtin_description bdesc_2arg[] =
5166{
5167  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5168  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5169  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5170  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5171  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5172  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5173  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5174  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5175  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5176  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5177  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5178  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5179  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5180  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5181  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5182  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5183  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5184  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5185  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5186  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5187  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5188  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5189  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5190  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5191  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5192  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5193  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5194  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5195  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5196  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5197  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5198  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5199  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5200  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5201  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5202  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5203  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5204  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5205  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5206  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5207  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5208  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5209  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5210  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5211  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5212  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5213  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5214  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5215  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5216  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5217  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5218  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5219  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5220  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5221  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5222  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5223  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5224  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5225  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5226  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5227  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5228  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5229  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5230  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5231  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5232  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5233  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5234  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5235  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5236  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5237  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5238  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5239  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5240  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5241  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5242  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5243  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5244  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5245  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5246  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5247  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5248  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5249  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5250  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5251  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5252  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5253  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5254  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5255  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5256  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5257  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5258  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5259  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5260  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5261  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5262  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5263  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5264  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5265  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5266  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5267  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5268  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5269  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5270  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5271  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5272  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5273  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5274  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5275  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5276  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5277  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5278  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5279  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5280
5281  /* Place holder, leave as first spe builtin.  */
5282  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5283  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5284  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5285  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5286  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5287  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5288  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5289  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5290  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5291  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5292  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5293  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5294  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5295  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5296  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5297  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5298  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5299  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5300  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5301  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5302  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5303  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5304  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5305  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5306  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5307  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5308  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5309  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5310  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5311  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5312  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5313  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5314  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5315  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5316  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5317  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5318  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5319  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5320  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5321  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5322  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5323  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5324  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5325  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5326  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5327  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5328  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5329  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5330  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5331  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5332  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5333  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5334  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5335  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5336  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5337  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5338  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5339  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5340  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5341  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5342  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5343  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5344  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5345  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5346  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5347  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5348  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5349  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5350  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5351  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5352  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5353  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5354  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5355  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5356  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5357  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5358  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5359  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5360  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5361  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5362  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5363  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5364  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5365  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5366  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5367  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5368  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5369  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5370  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5371  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5372  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5373  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5374  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5375  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5376  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5377  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5378  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5379  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5380  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5381  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5382  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5383  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5384  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5385  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5386  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5387  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5388  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5389  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5390  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5391
5392  /* SPE binary operations expecting a 5-bit unsigned literal.  */
5393  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5394
5395  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5396  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5397  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5398  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5399  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5400  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5401  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5402  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5403  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5404  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5405  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5406  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5407  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5408  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5409  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5410  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5411  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5412  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5413  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5414  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5415  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5416  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5417  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5418  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5419  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5420  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5421
5422  /* Place-holder.  Leave as last binary SPE builtin.  */
5423  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5424};
5425
5426/* AltiVec predicates.  */
5427
/* Describes one AltiVec predicate builtin: which target flags must be
   enabled, the insn pattern to emit, the assembler opcode string that
   altivec_expand_predicate_builtin passes to the insn as a SYMBOL_REF
   operand, the user-visible builtin name, and the builtin code.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* Target mask (e.g. MASK_ALTIVEC) gating the builtin.  */
  const enum insn_code icode;		/* Insn pattern used to expand it.  */
  const char *opcode;			/* Assembler mnemonic, passed through as a SYMBOL_REF.  */
  const char *const name;		/* User-visible builtin function name.  */
  const enum rs6000_builtins code;	/* Builtin enumerator.  */
};
5436
/* Table of AltiVec predicate builtins.  Each entry is expanded by
   altivec_expand_predicate_builtin: the insn performs the comparison
   and records its outcome in CR6, which is then tested according to
   the builtin's first (constant) argument.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
5453
5454/* SPE predicates.  */
/* NOTE: code elsewhere relies on the first and last entries below
   (SPE_BUILTIN_EVCMPEQ / SPE_BUILTIN_EVFSTSTLT) bracketing the range
   of SPE predicate builtins, so keep the place-holder ordering.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
5471
5472/* SPE evsel predicates.  */
/* NOTE: as with bdesc_spe_predicates, the first and last entries
   (SPE_BUILTIN_EVSEL_CMPGTS / SPE_BUILTIN_EVSEL_FSTSTEQ) are relied on
   as range markers; keep the place-holder ordering.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
5489
5490/* ABS* operations.  */
5491
/* Table of AltiVec absolute-value builtins; each is expanded via
   altivec_expand_abs_builtin, whose insn patterns take two scratch
   operands in addition to the input.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
5502
5503/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5504   foo (VECa).  */
5505
/* Each entry is expanded by rs6000_expand_unop_builtin.  The vspltis*
   entries take a 5-bit signed literal rather than a vector operand.
   NOTE: the SPE portion is treated as a contiguous range (EVABS ..
   EVSUBFUSIAAW) elsewhere; keep that ordering intact.  */
static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
5560
5561static rtx
5562rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5563{
5564  rtx pat;
5565  tree arg0 = TREE_VALUE (arglist);
5566  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5567  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5568  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5569
5570  if (icode == CODE_FOR_nothing)
5571    /* Builtin not supported on this processor.  */
5572    return 0;
5573
5574  /* If we got invalid arguments bail out before generating bad rtl.  */
5575  if (arg0 == error_mark_node)
5576    return const0_rtx;
5577
5578  if (icode == CODE_FOR_altivec_vspltisb
5579      || icode == CODE_FOR_altivec_vspltish
5580      || icode == CODE_FOR_altivec_vspltisw
5581      || icode == CODE_FOR_spe_evsplatfi
5582      || icode == CODE_FOR_spe_evsplati)
5583    {
5584      /* Only allow 5-bit *signed* literals.  */
5585      if (GET_CODE (op0) != CONST_INT
5586	  || INTVAL (op0) > 0x1f
5587	  || INTVAL (op0) < -0x1f)
5588	{
5589	  error ("argument 1 must be a 5-bit signed literal");
5590	  return const0_rtx;
5591	}
5592    }
5593
5594  if (target == 0
5595      || GET_MODE (target) != tmode
5596      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5597    target = gen_reg_rtx (tmode);
5598
5599  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5600    op0 = copy_to_mode_reg (mode0, op0);
5601
5602  pat = GEN_FCN (icode) (target, op0);
5603  if (! pat)
5604    return 0;
5605  emit_insn (pat);
5606
5607  return target;
5608}
5609
5610static rtx
5611altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5612{
5613  rtx pat, scratch1, scratch2;
5614  tree arg0 = TREE_VALUE (arglist);
5615  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5616  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5617  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5618
5619  /* If we have invalid arguments, bail out before generating bad rtl.  */
5620  if (arg0 == error_mark_node)
5621    return const0_rtx;
5622
5623  if (target == 0
5624      || GET_MODE (target) != tmode
5625      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5626    target = gen_reg_rtx (tmode);
5627
5628  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5629    op0 = copy_to_mode_reg (mode0, op0);
5630
5631  scratch1 = gen_reg_rtx (mode0);
5632  scratch2 = gen_reg_rtx (mode0);
5633
5634  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5635  if (! pat)
5636    return 0;
5637  emit_insn (pat);
5638
5639  return target;
5640}
5641
5642static rtx
5643rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5644{
5645  rtx pat;
5646  tree arg0 = TREE_VALUE (arglist);
5647  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5648  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5649  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5650  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5651  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5652  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5653
5654  if (icode == CODE_FOR_nothing)
5655    /* Builtin not supported on this processor.  */
5656    return 0;
5657
5658  /* If we got invalid arguments bail out before generating bad rtl.  */
5659  if (arg0 == error_mark_node || arg1 == error_mark_node)
5660    return const0_rtx;
5661
5662  if (icode == CODE_FOR_altivec_vcfux
5663      || icode == CODE_FOR_altivec_vcfsx
5664      || icode == CODE_FOR_altivec_vctsxs
5665      || icode == CODE_FOR_altivec_vctuxs
5666      || icode == CODE_FOR_altivec_vspltb
5667      || icode == CODE_FOR_altivec_vsplth
5668      || icode == CODE_FOR_altivec_vspltw
5669      || icode == CODE_FOR_spe_evaddiw
5670      || icode == CODE_FOR_spe_evldd
5671      || icode == CODE_FOR_spe_evldh
5672      || icode == CODE_FOR_spe_evldw
5673      || icode == CODE_FOR_spe_evlhhesplat
5674      || icode == CODE_FOR_spe_evlhhossplat
5675      || icode == CODE_FOR_spe_evlhhousplat
5676      || icode == CODE_FOR_spe_evlwhe
5677      || icode == CODE_FOR_spe_evlwhos
5678      || icode == CODE_FOR_spe_evlwhou
5679      || icode == CODE_FOR_spe_evlwhsplat
5680      || icode == CODE_FOR_spe_evlwwsplat
5681      || icode == CODE_FOR_spe_evrlwi
5682      || icode == CODE_FOR_spe_evslwi
5683      || icode == CODE_FOR_spe_evsrwis
5684      || icode == CODE_FOR_spe_evsubifw
5685      || icode == CODE_FOR_spe_evsrwiu)
5686    {
5687      /* Only allow 5-bit unsigned literals.  */
5688      STRIP_NOPS (arg1);
5689      if (TREE_CODE (arg1) != INTEGER_CST
5690	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
5691	{
5692	  error ("argument 2 must be a 5-bit unsigned literal");
5693	  return const0_rtx;
5694	}
5695    }
5696
5697  if (target == 0
5698      || GET_MODE (target) != tmode
5699      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5700    target = gen_reg_rtx (tmode);
5701
5702  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5703    op0 = copy_to_mode_reg (mode0, op0);
5704  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5705    op1 = copy_to_mode_reg (mode1, op1);
5706
5707  pat = GEN_FCN (icode) (target, op0, op1);
5708  if (! pat)
5709    return 0;
5710  emit_insn (pat);
5711
5712  return target;
5713}
5714
5715static rtx
5716altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5717				  tree arglist, rtx target)
5718{
5719  rtx pat, scratch;
5720  tree cr6_form = TREE_VALUE (arglist);
5721  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5722  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5723  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5724  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5725  enum machine_mode tmode = SImode;
5726  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5727  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5728  int cr6_form_int;
5729
5730  if (TREE_CODE (cr6_form) != INTEGER_CST)
5731    {
5732      error ("argument 1 of __builtin_altivec_predicate must be a constant");
5733      return const0_rtx;
5734    }
5735  else
5736    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5737
5738  if (mode0 != mode1)
5739    abort ();
5740
5741  /* If we have invalid arguments, bail out before generating bad rtl.  */
5742  if (arg0 == error_mark_node || arg1 == error_mark_node)
5743    return const0_rtx;
5744
5745  if (target == 0
5746      || GET_MODE (target) != tmode
5747      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5748    target = gen_reg_rtx (tmode);
5749
5750  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5751    op0 = copy_to_mode_reg (mode0, op0);
5752  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5753    op1 = copy_to_mode_reg (mode1, op1);
5754
5755  scratch = gen_reg_rtx (mode0);
5756
5757  pat = GEN_FCN (icode) (scratch, op0, op1,
5758			 gen_rtx (SYMBOL_REF, Pmode, opcode));
5759  if (! pat)
5760    return 0;
5761  emit_insn (pat);
5762
5763  /* The vec_any* and vec_all* predicates use the same opcodes for two
5764     different operations, but the bits in CR6 will be different
5765     depending on what information we want.  So we have to play tricks
5766     with CR6 to get the right bits out.
5767
5768     If you think this is disgusting, look at the specs for the
5769     AltiVec predicates.  */
5770
5771     switch (cr6_form_int)
5772       {
5773       case 0:
5774	 emit_insn (gen_cr6_test_for_zero (target));
5775	 break;
5776       case 1:
5777	 emit_insn (gen_cr6_test_for_zero_reverse (target));
5778	 break;
5779       case 2:
5780	 emit_insn (gen_cr6_test_for_lt (target));
5781	 break;
5782       case 3:
5783	 emit_insn (gen_cr6_test_for_lt_reverse (target));
5784	 break;
5785       default:
5786	 error ("argument 1 of __builtin_altivec_predicate is out of range");
5787	 break;
5788       }
5789
5790  return target;
5791}
5792
5793static rtx
5794altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5795{
5796  rtx pat, addr;
5797  tree arg0 = TREE_VALUE (arglist);
5798  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5799  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5800  enum machine_mode mode0 = Pmode;
5801  enum machine_mode mode1 = Pmode;
5802  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5803  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5804
5805  if (icode == CODE_FOR_nothing)
5806    /* Builtin not supported on this processor.  */
5807    return 0;
5808
5809  /* If we got invalid arguments bail out before generating bad rtl.  */
5810  if (arg0 == error_mark_node || arg1 == error_mark_node)
5811    return const0_rtx;
5812
5813  if (target == 0
5814      || GET_MODE (target) != tmode
5815      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5816    target = gen_reg_rtx (tmode);
5817
5818  op1 = copy_to_mode_reg (mode1, op1);
5819
5820  if (op0 == const0_rtx)
5821    {
5822      addr = gen_rtx_MEM (tmode, op1);
5823    }
5824  else
5825    {
5826      op0 = copy_to_mode_reg (mode0, op0);
5827      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5828    }
5829
5830  pat = GEN_FCN (icode) (target, addr);
5831
5832  if (! pat)
5833    return 0;
5834  emit_insn (pat);
5835
5836  return target;
5837}
5838
/* Expand an SPE store builtin.  ARGLIST supplies three arguments; note
   the cross-wise mapping onto the insn pattern below: the first user
   argument (op0) becomes insn operand 2, while the second and third
   (op1, op2) become insn operands 0 and 1 respectively.  The insn is
   emitted for its side effect only, so NULL_RTX is returned (or
   const0_rtx if the user arguments were invalid).  */
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* op0 feeds insn operand 2, so it is validated against mode2 (and
     likewise for the other two, shifted by one).  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
5871
5872static rtx
5873altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5874{
5875  tree arg0 = TREE_VALUE (arglist);
5876  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5877  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5878  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5879  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5880  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5881  rtx pat, addr;
5882  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5883  enum machine_mode mode1 = Pmode;
5884  enum machine_mode mode2 = Pmode;
5885
5886  /* Invalid arguments.  Bail before doing anything stoopid!  */
5887  if (arg0 == error_mark_node
5888      || arg1 == error_mark_node
5889      || arg2 == error_mark_node)
5890    return const0_rtx;
5891
5892  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5893    op0 = copy_to_mode_reg (tmode, op0);
5894
5895  op2 = copy_to_mode_reg (mode2, op2);
5896
5897  if (op1 == const0_rtx)
5898    {
5899      addr = gen_rtx_MEM (tmode, op2);
5900    }
5901  else
5902    {
5903      op1 = copy_to_mode_reg (mode1, op1);
5904      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5905    }
5906
5907  pat = GEN_FCN (icode) (addr, op0);
5908  if (pat)
5909    emit_insn (pat);
5910  return NULL_RTX;
5911}
5912
5913static rtx
5914rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5915{
5916  rtx pat;
5917  tree arg0 = TREE_VALUE (arglist);
5918  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5919  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5920  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5921  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5922  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5923  enum machine_mode tmode = insn_data[icode].operand[0].mode;
5924  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5925  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5926  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5927
5928  if (icode == CODE_FOR_nothing)
5929    /* Builtin not supported on this processor.  */
5930    return 0;
5931
5932  /* If we got invalid arguments bail out before generating bad rtl.  */
5933  if (arg0 == error_mark_node
5934      || arg1 == error_mark_node
5935      || arg2 == error_mark_node)
5936    return const0_rtx;
5937
5938  if (icode == CODE_FOR_altivec_vsldoi_4sf
5939      || icode == CODE_FOR_altivec_vsldoi_4si
5940      || icode == CODE_FOR_altivec_vsldoi_8hi
5941      || icode == CODE_FOR_altivec_vsldoi_16qi)
5942    {
5943      /* Only allow 4-bit unsigned literals.  */
5944      if (TREE_CODE (arg2) != INTEGER_CST
5945	  || TREE_INT_CST_LOW (arg2) & ~0xf)
5946	{
5947	  error ("argument 3 must be a 4-bit unsigned literal");
5948	  return const0_rtx;
5949	}
5950    }
5951
5952  if (target == 0
5953      || GET_MODE (target) != tmode
5954      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5955    target = gen_reg_rtx (tmode);
5956
5957  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5958    op0 = copy_to_mode_reg (mode0, op0);
5959  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5960    op1 = copy_to_mode_reg (mode1, op1);
5961  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5962    op2 = copy_to_mode_reg (mode2, op2);
5963
5964  pat = GEN_FCN (icode) (target, op0, op1, op2);
5965  if (! pat)
5966    return 0;
5967  emit_insn (pat);
5968
5969  return target;
5970}
5971
5972/* Expand the lvx builtins.  */
5973static rtx
5974altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5975{
5976  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5977  tree arglist = TREE_OPERAND (exp, 1);
5978  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5979  tree arg0;
5980  enum machine_mode tmode, mode0;
5981  rtx pat, op0;
5982  enum insn_code icode;
5983
5984  switch (fcode)
5985    {
5986    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5987      icode = CODE_FOR_altivec_lvx_16qi;
5988      break;
5989    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5990      icode = CODE_FOR_altivec_lvx_8hi;
5991      break;
5992    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5993      icode = CODE_FOR_altivec_lvx_4si;
5994      break;
5995    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5996      icode = CODE_FOR_altivec_lvx_4sf;
5997      break;
5998    default:
5999      *expandedp = false;
6000      return NULL_RTX;
6001    }
6002
6003  *expandedp = true;
6004
6005  arg0 = TREE_VALUE (arglist);
6006  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6007  tmode = insn_data[icode].operand[0].mode;
6008  mode0 = insn_data[icode].operand[1].mode;
6009
6010  if (target == 0
6011      || GET_MODE (target) != tmode
6012      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6013    target = gen_reg_rtx (tmode);
6014
6015  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6016    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6017
6018  pat = GEN_FCN (icode) (target, op0);
6019  if (! pat)
6020    return 0;
6021  emit_insn (pat);
6022  return target;
6023}
6024
6025/* Expand the stvx builtins.  */
6026static rtx
6027altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6028			   bool *expandedp)
6029{
6030  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6031  tree arglist = TREE_OPERAND (exp, 1);
6032  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6033  tree arg0, arg1;
6034  enum machine_mode mode0, mode1;
6035  rtx pat, op0, op1;
6036  enum insn_code icode;
6037
6038  switch (fcode)
6039    {
6040    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6041      icode = CODE_FOR_altivec_stvx_16qi;
6042      break;
6043    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6044      icode = CODE_FOR_altivec_stvx_8hi;
6045      break;
6046    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6047      icode = CODE_FOR_altivec_stvx_4si;
6048      break;
6049    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6050      icode = CODE_FOR_altivec_stvx_4sf;
6051      break;
6052    default:
6053      *expandedp = false;
6054      return NULL_RTX;
6055    }
6056
6057  arg0 = TREE_VALUE (arglist);
6058  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6059  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6060  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6061  mode0 = insn_data[icode].operand[0].mode;
6062  mode1 = insn_data[icode].operand[1].mode;
6063
6064  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6065    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6066  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6067    op1 = copy_to_mode_reg (mode1, op1);
6068
6069  pat = GEN_FCN (icode) (op0, op1);
6070  if (pat)
6071    emit_insn (pat);
6072
6073  *expandedp = true;
6074  return NULL_RTX;
6075}
6076
/* Expand the dst builtins (data-stream touch variants).  Looks FCODE
   up in bdesc_dst; when found, emits the insn and sets *EXPANDEDP.
   Returns NULL_RTX on success or no match, const0_rtx on invalid
   arguments.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
			    bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	/* NOTE(review): mode2 is computed but never used below;
	   op2 is emitted as the literal checked just after.  */
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	*expandedp = true;
	/* Peel conversions off arg2 so the literal check below sees
	   the underlying INTEGER_CST.  (op2 was already expanded from
	   the unstripped tree above.)  */
	STRIP_NOPS (arg2);
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	/* op2 needs no fixup: it is known to be a 2-bit constant.  */
	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	return NULL_RTX;
      }

  return NULL_RTX;
}
6137
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatch order: the specialized ld/st/dst expanders first, then the
   hand-written cases below, then the abs and predicate tables, and
   finally the lv* loads.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each helper sets *EXPANDEDP when it recognizes FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success; reset to false at the bottom if nothing matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from the vector status-and-control register; no args.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to the vector status-and-control register; one arg.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams; no operands.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* Stop one data stream; the tag must be a 2-bit literal.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
      /* The front end wrapped the intrinsic name in a string literal;
	 unwrap conversions/address-takings to reach it for the
	 diagnostic.  */
      arg0 = TREE_VALUE (arglist);
      while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
	arg0 = TREE_OPERAND (arg0, 0);
      error ("invalid parameter combination for `%s' AltiVec intrinsic",
	     TREE_STRING_POINTER (arg0));

      return const0_rtx;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					   arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Nothing recognized FCODE; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
6296
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  Each entry pairs an
   insn code and builtin name with its rs6000_builtins code; the mask
   field starts out zero here (presumably enabled elsewhere, e.g. via
   enable_mask_for_builtins -- TODO confirm).  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
6324
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume success; reset to false at the bottom if nothing matched.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument is the displacement; it must encode as a
	 5-bit unsigned literal.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* Table-driven dispatch: loads/binops, predicates, then evsel.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* Stores and the SPEFSCR moves are expanded by hand.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Move from the SPEFSCR register; no arguments.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Move to the SPEFSCR register; one argument.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Nothing recognized FCODE; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
6462
/* Expand an SPE predicate builtin.  The arglist carries a constant
   FORM selecting the all/any/upper/lower variant, followed by the two
   vectors to compare; ICODE is the compare pattern.  Returns an
   SImode 0/1 value in TARGET, or const0_rtx on invalid arguments.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be known at compile time.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands share one mode by construction.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare deposits its result in a condition register.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the chosen CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
6561
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];

   ICODE is the compare pattern used for a/b; the select itself is
   emitted afterwards via gen_spe_evsel / gen_spe_evsel_fs.  */

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Both compare operands share one mode by construction.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* All four vector operands share mode0 (== mode1), so operand 1's
     predicate is reused for each of them.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select c/d per element, keyed off the compare result.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
6622
6623/* Expand an expression EXP that calls a built-in function,
6624   with result going to TARGET if that's convenient
6625   (and in mode MODE if that's convenient).
6626   SUBTARGET may be used as the target for computing one of EXP's operands.
6627   IGNORE is nonzero if the value is to be ignored.  */
6628
6629static rtx
6630rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6631		      enum machine_mode mode ATTRIBUTE_UNUSED,
6632		      int ignore ATTRIBUTE_UNUSED)
6633{
6634  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6635  tree arglist = TREE_OPERAND (exp, 1);
6636  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6637  struct builtin_description *d;
6638  size_t i;
6639  rtx ret;
6640  bool success;
6641
6642  if (TARGET_ALTIVEC)
6643    {
6644      ret = altivec_expand_builtin (exp, target, &success);
6645
6646      if (success)
6647	return ret;
6648    }
6649  if (TARGET_SPE)
6650    {
6651      ret = spe_expand_builtin (exp, target, &success);
6652
6653      if (success)
6654	return ret;
6655    }
6656
6657  if (TARGET_ALTIVEC || TARGET_SPE)
6658    {
6659      /* Handle simple unary operations.  */
6660      d = (struct builtin_description *) bdesc_1arg;
6661      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6662	if (d->code == fcode)
6663	  return rs6000_expand_unop_builtin (d->icode, arglist, target);
6664
6665      /* Handle simple binary operations.  */
6666      d = (struct builtin_description *) bdesc_2arg;
6667      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6668	if (d->code == fcode)
6669	  return rs6000_expand_binop_builtin (d->icode, arglist, target);
6670
6671      /* Handle simple ternary operations.  */
6672      d = (struct builtin_description *) bdesc_3arg;
6673      for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
6674	if (d->code == fcode)
6675	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6676    }
6677
6678  abort ();
6679  return NULL_RTX;
6680}
6681
/* Register the AltiVec/SPE vector types and their type names with the
   front end, then initialize the per-target builtin tables.  */
static void
rs6000_init_builtins (void)
{
  /* Opaque V2SI/V2SF types let SPE arguments interconvert freely.  */
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector
     unsigned ...' types, especially in C++ land.  Similarly, 'vector
     pixel' is distinct from 'vector unsigned short'.  */

  bool_char_type_node = copy_node (unsigned_intQI_type_node);
  TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
  bool_short_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
  bool_int_type_node = copy_node (unsigned_intSI_type_node);
  TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
  pixel_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;

  /* Make the scalar bool/pixel element types visible by name.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = make_vector (V16QImode, bool_char_type_node, 1);
  bool_V8HI_type_node = make_vector (V8HImode, bool_short_type_node, 1);
  bool_V4SI_type_node = make_vector (V4SImode, bool_int_type_node, 1);
  pixel_V8HI_type_node = make_vector (V8HImode, pixel_type_node, 1);

  /* And the vector types themselves.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  /* Finally register the builtin functions for the enabled units.  */
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6763
6764/* Search through a set of builtins and enable the mask bits.
6765   DESC is an array of builtins.
6766   SIZE is the total number of builtins.
6767   START is the builtin enum at which to start.
6768   END is the builtin enum at which to end.  */
6769static void
6770enable_mask_for_builtins (struct builtin_description *desc, int size,
6771			  enum rs6000_builtins start,
6772			  enum rs6000_builtins end)
6773{
6774  int i;
6775
6776  for (i = 0; i < size; ++i)
6777    if (desc[i].code == start)
6778      break;
6779
6780  if (i == size)
6781    return;
6782
6783  for (; i < size; ++i)
6784    {
6785      /* Flip all the bits on.  */
6786      desc[i].mask = target_flags;
6787      if (desc[i].code == end)
6788	break;
6789    }
6790}
6791
/* Create the builtin function declarations that are specific to the
   SPE (e500) vector unit.  Called from rs6000_init_builtins when
   TARGET_SPE is set.  The vector operands all use the opaque V2SI/V2SF
   types so that __ev64_opaque__ arguments are accepted interchangeably.  */
static void
spe_init_builtins (void)
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function-type nodes for the irregular SPE builtins defined below;
     built by hand with tree_cons chains terminated by void_list_node.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_signed_char
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, signed_char_type_node,
				      endlink));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  /* Each call below enables the whole contiguous run of table entries
     between the named start and end builtin codes.  */
  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Make the opaque 64-bit vector type visible to the front end.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
		 opaque_V2SI_type_node));

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  /* Stores: vector value, pointer, and an index (x forms) or an
     immediate offset.  */
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  */
  /* The signature is chosen from the mode of the predicate insn's
     first input operand (V2SI vs. V2SF).  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  /* Likewise keyed on the insn's first input operand mode.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7010
/* Create the builtin function declarations that are specific to
   AltiVec.  Called from rs6000_init_builtins when TARGET_ALTIVEC is
   set.  The simple unary/binary/ternary operators shared with SPE are
   registered separately in rs6000_common_init_builtins.  */
static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Pointer types for the internal load/store builtins.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified variants, used for the load builtins so that
     const-qualified source operands are accepted.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function-type nodes used by the builtin definitions below.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_pcchar
    = build_function_type_list (integer_type_node,
				pcchar_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* See altivec.h for usage of "__builtin_altivec_compiletime_error".  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
	       ALTIVEC_BUILTIN_COMPILETIME_ERROR);

  /* Add the DST variants.  */
  /* All dst* data-stream-touch builtins share one signature.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  */
  /* The signature is chosen from the mode of the predicate insn's
     first input operand.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  */
  /* Here the signature is keyed on the output operand's mode.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
7216
7217static void
7218rs6000_common_init_builtins (void)
7219{
7220  struct builtin_description *d;
7221  size_t i;
7222
7223  tree v4sf_ftype_v4sf_v4sf_v16qi
7224    = build_function_type_list (V4SF_type_node,
7225				V4SF_type_node, V4SF_type_node,
7226				V16QI_type_node, NULL_TREE);
7227  tree v4si_ftype_v4si_v4si_v16qi
7228    = build_function_type_list (V4SI_type_node,
7229				V4SI_type_node, V4SI_type_node,
7230				V16QI_type_node, NULL_TREE);
7231  tree v8hi_ftype_v8hi_v8hi_v16qi
7232    = build_function_type_list (V8HI_type_node,
7233				V8HI_type_node, V8HI_type_node,
7234				V16QI_type_node, NULL_TREE);
7235  tree v16qi_ftype_v16qi_v16qi_v16qi
7236    = build_function_type_list (V16QI_type_node,
7237				V16QI_type_node, V16QI_type_node,
7238				V16QI_type_node, NULL_TREE);
7239  tree v4si_ftype_int
7240    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7241  tree v8hi_ftype_int
7242    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7243  tree v16qi_ftype_int
7244    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7245  tree v8hi_ftype_v16qi
7246    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7247  tree v4sf_ftype_v4sf
7248    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7249
7250  tree v2si_ftype_v2si_v2si
7251    = build_function_type_list (opaque_V2SI_type_node,
7252				opaque_V2SI_type_node,
7253				opaque_V2SI_type_node, NULL_TREE);
7254
7255  tree v2sf_ftype_v2sf_v2sf
7256    = build_function_type_list (opaque_V2SF_type_node,
7257				opaque_V2SF_type_node,
7258				opaque_V2SF_type_node, NULL_TREE);
7259
7260  tree v2si_ftype_int_int
7261    = build_function_type_list (opaque_V2SI_type_node,
7262				integer_type_node, integer_type_node,
7263				NULL_TREE);
7264
7265  tree v2si_ftype_v2si
7266    = build_function_type_list (opaque_V2SI_type_node,
7267				opaque_V2SI_type_node, NULL_TREE);
7268
7269  tree v2sf_ftype_v2sf
7270    = build_function_type_list (opaque_V2SF_type_node,
7271				opaque_V2SF_type_node, NULL_TREE);
7272
7273  tree v2sf_ftype_v2si
7274    = build_function_type_list (opaque_V2SF_type_node,
7275				opaque_V2SI_type_node, NULL_TREE);
7276
7277  tree v2si_ftype_v2sf
7278    = build_function_type_list (opaque_V2SI_type_node,
7279				opaque_V2SF_type_node, NULL_TREE);
7280
7281  tree v2si_ftype_v2si_char
7282    = build_function_type_list (opaque_V2SI_type_node,
7283				opaque_V2SI_type_node,
7284				char_type_node, NULL_TREE);
7285
7286  tree v2si_ftype_int_char
7287    = build_function_type_list (opaque_V2SI_type_node,
7288				integer_type_node, char_type_node, NULL_TREE);
7289
7290  tree v2si_ftype_char
7291    = build_function_type_list (opaque_V2SI_type_node,
7292				char_type_node, NULL_TREE);
7293
7294  tree int_ftype_int_int
7295    = build_function_type_list (integer_type_node,
7296				integer_type_node, integer_type_node,
7297				NULL_TREE);
7298
7299  tree v4si_ftype_v4si_v4si
7300    = build_function_type_list (V4SI_type_node,
7301				V4SI_type_node, V4SI_type_node, NULL_TREE);
7302  tree v4sf_ftype_v4si_int
7303    = build_function_type_list (V4SF_type_node,
7304				V4SI_type_node, integer_type_node, NULL_TREE);
7305  tree v4si_ftype_v4sf_int
7306    = build_function_type_list (V4SI_type_node,
7307				V4SF_type_node, integer_type_node, NULL_TREE);
7308  tree v4si_ftype_v4si_int
7309    = build_function_type_list (V4SI_type_node,
7310				V4SI_type_node, integer_type_node, NULL_TREE);
7311  tree v8hi_ftype_v8hi_int
7312    = build_function_type_list (V8HI_type_node,
7313				V8HI_type_node, integer_type_node, NULL_TREE);
7314  tree v16qi_ftype_v16qi_int
7315    = build_function_type_list (V16QI_type_node,
7316				V16QI_type_node, integer_type_node, NULL_TREE);
7317  tree v16qi_ftype_v16qi_v16qi_int
7318    = build_function_type_list (V16QI_type_node,
7319				V16QI_type_node, V16QI_type_node,
7320				integer_type_node, NULL_TREE);
7321  tree v8hi_ftype_v8hi_v8hi_int
7322    = build_function_type_list (V8HI_type_node,
7323				V8HI_type_node, V8HI_type_node,
7324				integer_type_node, NULL_TREE);
7325  tree v4si_ftype_v4si_v4si_int
7326    = build_function_type_list (V4SI_type_node,
7327				V4SI_type_node, V4SI_type_node,
7328				integer_type_node, NULL_TREE);
7329  tree v4sf_ftype_v4sf_v4sf_int
7330    = build_function_type_list (V4SF_type_node,
7331				V4SF_type_node, V4SF_type_node,
7332				integer_type_node, NULL_TREE);
7333  tree v4sf_ftype_v4sf_v4sf
7334    = build_function_type_list (V4SF_type_node,
7335				V4SF_type_node, V4SF_type_node, NULL_TREE);
7336  tree v4sf_ftype_v4sf_v4sf_v4si
7337    = build_function_type_list (V4SF_type_node,
7338				V4SF_type_node, V4SF_type_node,
7339				V4SI_type_node, NULL_TREE);
7340  tree v4sf_ftype_v4sf_v4sf_v4sf
7341    = build_function_type_list (V4SF_type_node,
7342				V4SF_type_node, V4SF_type_node,
7343				V4SF_type_node, NULL_TREE);
7344  tree v4si_ftype_v4si_v4si_v4si
7345    = build_function_type_list (V4SI_type_node,
7346				V4SI_type_node, V4SI_type_node,
7347				V4SI_type_node, NULL_TREE);
7348  tree v8hi_ftype_v8hi_v8hi
7349    = build_function_type_list (V8HI_type_node,
7350				V8HI_type_node, V8HI_type_node, NULL_TREE);
7351  tree v8hi_ftype_v8hi_v8hi_v8hi
7352    = build_function_type_list (V8HI_type_node,
7353				V8HI_type_node, V8HI_type_node,
7354				V8HI_type_node, NULL_TREE);
7355 tree v4si_ftype_v8hi_v8hi_v4si
7356    = build_function_type_list (V4SI_type_node,
7357				V8HI_type_node, V8HI_type_node,
7358				V4SI_type_node, NULL_TREE);
7359 tree v4si_ftype_v16qi_v16qi_v4si
7360    = build_function_type_list (V4SI_type_node,
7361				V16QI_type_node, V16QI_type_node,
7362				V4SI_type_node, NULL_TREE);
7363  tree v16qi_ftype_v16qi_v16qi
7364    = build_function_type_list (V16QI_type_node,
7365				V16QI_type_node, V16QI_type_node, NULL_TREE);
7366  tree v4si_ftype_v4sf_v4sf
7367    = build_function_type_list (V4SI_type_node,
7368				V4SF_type_node, V4SF_type_node, NULL_TREE);
7369  tree v8hi_ftype_v16qi_v16qi
7370    = build_function_type_list (V8HI_type_node,
7371				V16QI_type_node, V16QI_type_node, NULL_TREE);
7372  tree v4si_ftype_v8hi_v8hi
7373    = build_function_type_list (V4SI_type_node,
7374				V8HI_type_node, V8HI_type_node, NULL_TREE);
7375  tree v8hi_ftype_v4si_v4si
7376    = build_function_type_list (V8HI_type_node,
7377				V4SI_type_node, V4SI_type_node, NULL_TREE);
7378  tree v16qi_ftype_v8hi_v8hi
7379    = build_function_type_list (V16QI_type_node,
7380				V8HI_type_node, V8HI_type_node, NULL_TREE);
7381  tree v4si_ftype_v16qi_v4si
7382    = build_function_type_list (V4SI_type_node,
7383				V16QI_type_node, V4SI_type_node, NULL_TREE);
7384  tree v4si_ftype_v16qi_v16qi
7385    = build_function_type_list (V4SI_type_node,
7386				V16QI_type_node, V16QI_type_node, NULL_TREE);
7387  tree v4si_ftype_v8hi_v4si
7388    = build_function_type_list (V4SI_type_node,
7389				V8HI_type_node, V4SI_type_node, NULL_TREE);
7390  tree v4si_ftype_v8hi
7391    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7392  tree int_ftype_v4si_v4si
7393    = build_function_type_list (integer_type_node,
7394				V4SI_type_node, V4SI_type_node, NULL_TREE);
7395  tree int_ftype_v4sf_v4sf
7396    = build_function_type_list (integer_type_node,
7397				V4SF_type_node, V4SF_type_node, NULL_TREE);
7398  tree int_ftype_v16qi_v16qi
7399    = build_function_type_list (integer_type_node,
7400				V16QI_type_node, V16QI_type_node, NULL_TREE);
7401  tree int_ftype_v8hi_v8hi
7402    = build_function_type_list (integer_type_node,
7403				V8HI_type_node, V8HI_type_node, NULL_TREE);
7404
7405  /* Add the simple ternary operators.  */
7406  d = (struct builtin_description *) bdesc_3arg;
7407  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7408    {
7409
7410      enum machine_mode mode0, mode1, mode2, mode3;
7411      tree type;
7412
7413      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7414	continue;
7415
7416      mode0 = insn_data[d->icode].operand[0].mode;
7417      mode1 = insn_data[d->icode].operand[1].mode;
7418      mode2 = insn_data[d->icode].operand[2].mode;
7419      mode3 = insn_data[d->icode].operand[3].mode;
7420
7421      /* When all four are of the same mode.  */
7422      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7423	{
7424	  switch (mode0)
7425	    {
7426	    case V4SImode:
7427	      type = v4si_ftype_v4si_v4si_v4si;
7428	      break;
7429	    case V4SFmode:
7430	      type = v4sf_ftype_v4sf_v4sf_v4sf;
7431	      break;
7432	    case V8HImode:
7433	      type = v8hi_ftype_v8hi_v8hi_v8hi;
7434	      break;
7435	    case V16QImode:
7436	      type = v16qi_ftype_v16qi_v16qi_v16qi;
7437	      break;
7438	    default:
7439	      abort();
7440	    }
7441	}
7442      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7443        {
7444	  switch (mode0)
7445	    {
7446	    case V4SImode:
7447	      type = v4si_ftype_v4si_v4si_v16qi;
7448	      break;
7449	    case V4SFmode:
7450	      type = v4sf_ftype_v4sf_v4sf_v16qi;
7451	      break;
7452	    case V8HImode:
7453	      type = v8hi_ftype_v8hi_v8hi_v16qi;
7454	      break;
7455	    case V16QImode:
7456	      type = v16qi_ftype_v16qi_v16qi_v16qi;
7457	      break;
7458	    default:
7459	      abort();
7460	    }
7461	}
7462      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7463	       && mode3 == V4SImode)
7464	type = v4si_ftype_v16qi_v16qi_v4si;
7465      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7466	       && mode3 == V4SImode)
7467	type = v4si_ftype_v8hi_v8hi_v4si;
7468      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7469	       && mode3 == V4SImode)
7470	type = v4sf_ftype_v4sf_v4sf_v4si;
7471
7472      /* vchar, vchar, vchar, 4 bit literal.  */
7473      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7474	       && mode3 == QImode)
7475	type = v16qi_ftype_v16qi_v16qi_int;
7476
7477      /* vshort, vshort, vshort, 4 bit literal.  */
7478      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7479	       && mode3 == QImode)
7480	type = v8hi_ftype_v8hi_v8hi_int;
7481
7482      /* vint, vint, vint, 4 bit literal.  */
7483      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7484	       && mode3 == QImode)
7485	type = v4si_ftype_v4si_v4si_int;
7486
7487      /* vfloat, vfloat, vfloat, 4 bit literal.  */
7488      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7489	       && mode3 == QImode)
7490	type = v4sf_ftype_v4sf_v4sf_int;
7491
7492      else
7493	abort ();
7494
7495      def_builtin (d->mask, d->name, type, d->code);
7496    }
7497
7498  /* Add the simple binary operators.  */
7499  d = (struct builtin_description *) bdesc_2arg;
7500  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7501    {
7502      enum machine_mode mode0, mode1, mode2;
7503      tree type;
7504
7505      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7506	continue;
7507
7508      mode0 = insn_data[d->icode].operand[0].mode;
7509      mode1 = insn_data[d->icode].operand[1].mode;
7510      mode2 = insn_data[d->icode].operand[2].mode;
7511
7512      /* When all three operands are of the same mode.  */
7513      if (mode0 == mode1 && mode1 == mode2)
7514	{
7515	  switch (mode0)
7516	    {
7517	    case V4SFmode:
7518	      type = v4sf_ftype_v4sf_v4sf;
7519	      break;
7520	    case V4SImode:
7521	      type = v4si_ftype_v4si_v4si;
7522	      break;
7523	    case V16QImode:
7524	      type = v16qi_ftype_v16qi_v16qi;
7525	      break;
7526	    case V8HImode:
7527	      type = v8hi_ftype_v8hi_v8hi;
7528	      break;
7529	    case V2SImode:
7530	      type = v2si_ftype_v2si_v2si;
7531	      break;
7532	    case V2SFmode:
7533	      type = v2sf_ftype_v2sf_v2sf;
7534	      break;
7535	    case SImode:
7536	      type = int_ftype_int_int;
7537	      break;
7538	    default:
7539	      abort ();
7540	    }
7541	}
7542
7543      /* A few other combos we really don't want to do manually.  */
7544
7545      /* vint, vfloat, vfloat.  */
7546      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7547	type = v4si_ftype_v4sf_v4sf;
7548
7549      /* vshort, vchar, vchar.  */
7550      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7551	type = v8hi_ftype_v16qi_v16qi;
7552
7553      /* vint, vshort, vshort.  */
7554      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7555	type = v4si_ftype_v8hi_v8hi;
7556
7557      /* vshort, vint, vint.  */
7558      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7559	type = v8hi_ftype_v4si_v4si;
7560
7561      /* vchar, vshort, vshort.  */
7562      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7563	type = v16qi_ftype_v8hi_v8hi;
7564
7565      /* vint, vchar, vint.  */
7566      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7567	type = v4si_ftype_v16qi_v4si;
7568
7569      /* vint, vchar, vchar.  */
7570      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7571	type = v4si_ftype_v16qi_v16qi;
7572
7573      /* vint, vshort, vint.  */
7574      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7575	type = v4si_ftype_v8hi_v4si;
7576
7577      /* vint, vint, 5 bit literal.  */
7578      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7579	type = v4si_ftype_v4si_int;
7580
7581      /* vshort, vshort, 5 bit literal.  */
7582      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7583	type = v8hi_ftype_v8hi_int;
7584
7585      /* vchar, vchar, 5 bit literal.  */
7586      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7587	type = v16qi_ftype_v16qi_int;
7588
7589      /* vfloat, vint, 5 bit literal.  */
7590      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7591	type = v4sf_ftype_v4si_int;
7592
7593      /* vint, vfloat, 5 bit literal.  */
7594      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7595	type = v4si_ftype_v4sf_int;
7596
7597      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7598	type = v2si_ftype_int_int;
7599
7600      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7601	type = v2si_ftype_v2si_char;
7602
7603      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7604	type = v2si_ftype_int_char;
7605
7606      /* int, x, x.  */
7607      else if (mode0 == SImode)
7608	{
7609	  switch (mode1)
7610	    {
7611	    case V4SImode:
7612	      type = int_ftype_v4si_v4si;
7613	      break;
7614	    case V4SFmode:
7615	      type = int_ftype_v4sf_v4sf;
7616	      break;
7617	    case V16QImode:
7618	      type = int_ftype_v16qi_v16qi;
7619	      break;
7620	    case V8HImode:
7621	      type = int_ftype_v8hi_v8hi;
7622	      break;
7623	    default:
7624	      abort ();
7625	    }
7626	}
7627
7628      else
7629	abort ();
7630
7631      def_builtin (d->mask, d->name, type, d->code);
7632    }
7633
7634  /* Add the simple unary operators.  */
7635  d = (struct builtin_description *) bdesc_1arg;
7636  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7637    {
7638      enum machine_mode mode0, mode1;
7639      tree type;
7640
7641      if (d->name == 0 || d->icode == CODE_FOR_nothing)
7642	continue;
7643
7644      mode0 = insn_data[d->icode].operand[0].mode;
7645      mode1 = insn_data[d->icode].operand[1].mode;
7646
7647      if (mode0 == V4SImode && mode1 == QImode)
7648        type = v4si_ftype_int;
7649      else if (mode0 == V8HImode && mode1 == QImode)
7650        type = v8hi_ftype_int;
7651      else if (mode0 == V16QImode && mode1 == QImode)
7652        type = v16qi_ftype_int;
7653      else if (mode0 == V4SFmode && mode1 == V4SFmode)
7654	type = v4sf_ftype_v4sf;
7655      else if (mode0 == V8HImode && mode1 == V16QImode)
7656	type = v8hi_ftype_v16qi;
7657      else if (mode0 == V4SImode && mode1 == V8HImode)
7658	type = v4si_ftype_v8hi;
7659      else if (mode0 == V2SImode && mode1 == V2SImode)
7660	type = v2si_ftype_v2si;
7661      else if (mode0 == V2SFmode && mode1 == V2SFmode)
7662	type = v2sf_ftype_v2sf;
7663      else if (mode0 == V2SFmode && mode1 == V2SImode)
7664	type = v2sf_ftype_v2si;
7665      else if (mode0 == V2SImode && mode1 == V2SFmode)
7666	type = v2si_ftype_v2sf;
7667      else if (mode0 == V2SImode && mode1 == QImode)
7668	type = v2si_ftype_char;
7669      else
7670	abort ();
7671
7672      def_builtin (d->mask, d->name, type, d->code);
7673    }
7674}
7675
/* Install target-specific names for the floating-point support
   library routines.  Does nothing for soft-float targets, which keep
   the default libcall names.  */
static void
rs6000_init_libfuncs (void)
{
  if (!TARGET_HARD_FLOAT)
    return;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Pre-PowerPC (POWER) AIX targets do float->int conversion via
	 library calls.  */
      if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
	{
	  /* AIX library routines for float->int conversion.  */
	  set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
	  set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
	  set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
	  set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
	}

      /* AIX/Darwin/64-bit Linux quad floating point routines.  */
      if (!TARGET_XL_COMPAT)
	{
	  set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
	  set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
	  set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
	  set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
	}
      else
	{
	  /* IBM XL compiler compatible names for the same quad FP
	     arithmetic routines.  */
	  set_optab_libfunc (add_optab, TFmode, "_xlqadd");
	  set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
	  set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
	  set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
	}
    }
  else
    {
      /* 32-bit SVR4 quad floating point routines.  */

      set_optab_libfunc (add_optab, TFmode, "_q_add");
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
      /* Square root only when the hardware has the fsqrt family.  */
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");

      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
      set_optab_libfunc (le_optab, TFmode, "_q_fle");

      /* Conversions between TFmode and the narrower float modes, and
	 between TFmode and SImode.  */
      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
    }
}
7737
7738/* Expand a block move operation, and return 1 if successful.  Return 0
7739   if we should let the compiler generate normal code.
7740
7741   operands[0] is the destination
7742   operands[1] is the source
7743   operands[2] is the length
7744   operands[3] is the alignment */
7745
7746#define MAX_MOVE_REG 4
7747
int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  rtx stores[MAX_MOVE_REG];	/* queued stores, flushed in batches */
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      /* Each iteration picks the widest move available for the
	 remaining byte count and alignment: a string-move pattern
	 (BLKmode, several registers at once) when TARGET_STRING and
	 the registers it consumes are free, otherwise a single scalar
	 load/store pair.  */
      union {
	rtx (*movstrsi) (rtx, rtx, rtx, rtx);
	rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movstrsi = gen_movstrsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movstrsi = gen_movstrsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movstrsi = gen_movstrsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movstrsi = gen_movstrsi_2reg;
	}
      else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movstrsi = gen_movstrsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      /* Scalar case: load into a fresh pseudo immediately but queue
	 the store, so consecutive loads can be issued back to back.  */
      if (mode != BLKmode)
	{
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush queued stores when the queue is full, a string move is
	 about to be emitted, or this is the final chunk.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movstrsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  emit_insn ((*gen_func.movstrsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
7910
7911
7912/* Return 1 if OP is a load multiple operation.  It is known to be a
7913   PARALLEL and the first section will be tested.  */
7914
7915int
7916load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7917{
7918  int count = XVECLEN (op, 0);
7919  unsigned int dest_regno;
7920  rtx src_addr;
7921  int i;
7922
7923  /* Perform a quick check so we don't blow up below.  */
7924  if (count <= 1
7925      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7926      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7927      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7928    return 0;
7929
7930  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7931  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7932
7933  for (i = 1; i < count; i++)
7934    {
7935      rtx elt = XVECEXP (op, 0, i);
7936
7937      if (GET_CODE (elt) != SET
7938	  || GET_CODE (SET_DEST (elt)) != REG
7939	  || GET_MODE (SET_DEST (elt)) != SImode
7940	  || REGNO (SET_DEST (elt)) != dest_regno + i
7941	  || GET_CODE (SET_SRC (elt)) != MEM
7942	  || GET_MODE (SET_SRC (elt)) != SImode
7943	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7944	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7945	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7946	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7947	return 0;
7948    }
7949
7950  return 1;
7951}
7952
7953/* Similar, but tests for store multiple.  Here, the second vector element
7954   is a CLOBBER.  It will be tested later.  */
7955
7956int
7957store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7958{
7959  int count = XVECLEN (op, 0) - 1;
7960  unsigned int src_regno;
7961  rtx dest_addr;
7962  int i;
7963
7964  /* Perform a quick check so we don't blow up below.  */
7965  if (count <= 1
7966      || GET_CODE (XVECEXP (op, 0, 0)) != SET
7967      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7968      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7969    return 0;
7970
7971  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7972  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7973
7974  for (i = 1; i < count; i++)
7975    {
7976      rtx elt = XVECEXP (op, 0, i + 1);
7977
7978      if (GET_CODE (elt) != SET
7979	  || GET_CODE (SET_SRC (elt)) != REG
7980	  || GET_MODE (SET_SRC (elt)) != SImode
7981	  || REGNO (SET_SRC (elt)) != src_regno + i
7982	  || GET_CODE (SET_DEST (elt)) != MEM
7983	  || GET_MODE (SET_DEST (elt)) != SImode
7984	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7985	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7986	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7987	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7988	return 0;
7989    }
7990
7991  return 1;
7992}
7993
7994/* Return a string to perform a load_multiple operation.
7995   operands[0] is the vector.
7996   operands[1] is the source address.
7997   operands[2] is the first destination register.  */
7998
const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no string instruction at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for the destination register (if any) that overlaps the
     address register.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address lands in the last destination register:
	       string-load the first words-1 registers, then load the
	       final word on top of the address.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address lands in the first destination register:
	       bump the address past word 0, string-load the rest, then
	       load word 0 last (clobbering the address).  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address lands somewhere in the middle: fall back to
	       individual loads, doing the overlapping one last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a plain string load does the whole block.  */
  return "{lsi|lswi} %2,%1,%N0";
}
8050
8051/* Return 1 for a parallel vrsave operation.  */
8052
8053int
8054vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8055{
8056  int count = XVECLEN (op, 0);
8057  unsigned int dest_regno, src_regno;
8058  int i;
8059
8060  if (count <= 1
8061      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8062      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8063      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8064    return 0;
8065
8066  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8067  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8068
8069  if (dest_regno != VRSAVE_REGNO
8070      && src_regno != VRSAVE_REGNO)
8071    return 0;
8072
8073  for (i = 1; i < count; i++)
8074    {
8075      rtx elt = XVECEXP (op, 0, i);
8076
8077      if (GET_CODE (elt) != CLOBBER
8078	  && GET_CODE (elt) != SET)
8079	return 0;
8080    }
8081
8082  return 1;
8083}
8084
8085/* Return 1 for an PARALLEL suitable for mfcr.  */
8086
8087int
8088mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8089{
8090  int count = XVECLEN (op, 0);
8091  int i;
8092
8093  /* Perform a quick check so we don't blow up below.  */
8094  if (count < 1
8095      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8096      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8097      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8098    return 0;
8099
8100  for (i = 0; i < count; i++)
8101    {
8102      rtx exp = XVECEXP (op, 0, i);
8103      rtx unspec;
8104      int maskval;
8105      rtx src_reg;
8106
8107      src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8108
8109      if (GET_CODE (src_reg) != REG
8110	  || GET_MODE (src_reg) != CCmode
8111	  || ! CR_REGNO_P (REGNO (src_reg)))
8112	return 0;
8113
8114      if (GET_CODE (exp) != SET
8115	  || GET_CODE (SET_DEST (exp)) != REG
8116	  || GET_MODE (SET_DEST (exp)) != SImode
8117	  || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8118	return 0;
8119      unspec = SET_SRC (exp);
8120      maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8121
8122      if (GET_CODE (unspec) != UNSPEC
8123	  || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8124	  || XVECLEN (unspec, 0) != 2
8125	  || XVECEXP (unspec, 0, 0) != src_reg
8126	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8127	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8128	return 0;
8129    }
8130  return 1;
8131}
8132
8133/* Return 1 for an PARALLEL suitable for mtcrf.  */
8134
8135int
8136mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8137{
8138  int count = XVECLEN (op, 0);
8139  int i;
8140  rtx src_reg;
8141
8142  /* Perform a quick check so we don't blow up below.  */
8143  if (count < 1
8144      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8145      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8146      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8147    return 0;
8148  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8149
8150  if (GET_CODE (src_reg) != REG
8151      || GET_MODE (src_reg) != SImode
8152      || ! INT_REGNO_P (REGNO (src_reg)))
8153    return 0;
8154
8155  for (i = 0; i < count; i++)
8156    {
8157      rtx exp = XVECEXP (op, 0, i);
8158      rtx unspec;
8159      int maskval;
8160
8161      if (GET_CODE (exp) != SET
8162	  || GET_CODE (SET_DEST (exp)) != REG
8163	  || GET_MODE (SET_DEST (exp)) != CCmode
8164	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8165	return 0;
8166      unspec = SET_SRC (exp);
8167      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8168
8169      if (GET_CODE (unspec) != UNSPEC
8170	  || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8171	  || XVECLEN (unspec, 0) != 2
8172	  || XVECEXP (unspec, 0, 0) != src_reg
8173	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8174	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8175	return 0;
8176    }
8177  return 1;
8178}
8179
8180/* Return 1 for an PARALLEL suitable for lmw.  */
8181
8182int
8183lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8184{
8185  int count = XVECLEN (op, 0);
8186  unsigned int dest_regno;
8187  rtx src_addr;
8188  unsigned int base_regno;
8189  HOST_WIDE_INT offset;
8190  int i;
8191
8192  /* Perform a quick check so we don't blow up below.  */
8193  if (count <= 1
8194      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8195      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8196      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8197    return 0;
8198
8199  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8200  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8201
8202  if (dest_regno > 31
8203      || count != 32 - (int) dest_regno)
8204    return 0;
8205
8206  if (legitimate_indirect_address_p (src_addr, 0))
8207    {
8208      offset = 0;
8209      base_regno = REGNO (src_addr);
8210      if (base_regno == 0)
8211	return 0;
8212    }
8213  else if (legitimate_offset_address_p (SImode, src_addr, 0))
8214    {
8215      offset = INTVAL (XEXP (src_addr, 1));
8216      base_regno = REGNO (XEXP (src_addr, 0));
8217    }
8218  else
8219    return 0;
8220
8221  for (i = 0; i < count; i++)
8222    {
8223      rtx elt = XVECEXP (op, 0, i);
8224      rtx newaddr;
8225      rtx addr_reg;
8226      HOST_WIDE_INT newoffset;
8227
8228      if (GET_CODE (elt) != SET
8229	  || GET_CODE (SET_DEST (elt)) != REG
8230	  || GET_MODE (SET_DEST (elt)) != SImode
8231	  || REGNO (SET_DEST (elt)) != dest_regno + i
8232	  || GET_CODE (SET_SRC (elt)) != MEM
8233	  || GET_MODE (SET_SRC (elt)) != SImode)
8234	return 0;
8235      newaddr = XEXP (SET_SRC (elt), 0);
8236      if (legitimate_indirect_address_p (newaddr, 0))
8237	{
8238	  newoffset = 0;
8239	  addr_reg = newaddr;
8240	}
8241      else if (legitimate_offset_address_p (SImode, newaddr, 0))
8242	{
8243	  addr_reg = XEXP (newaddr, 0);
8244	  newoffset = INTVAL (XEXP (newaddr, 1));
8245	}
8246      else
8247	return 0;
8248      if (REGNO (addr_reg) != base_regno
8249	  || newoffset != offset + 4 * i)
8250	return 0;
8251    }
8252
8253  return 1;
8254}
8255
8256/* Return 1 for an PARALLEL suitable for stmw.  */
8257
8258int
8259stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8260{
8261  int count = XVECLEN (op, 0);
8262  unsigned int src_regno;
8263  rtx dest_addr;
8264  unsigned int base_regno;
8265  HOST_WIDE_INT offset;
8266  int i;
8267
8268  /* Perform a quick check so we don't blow up below.  */
8269  if (count <= 1
8270      || GET_CODE (XVECEXP (op, 0, 0)) != SET
8271      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8272      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8273    return 0;
8274
8275  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8276  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8277
8278  if (src_regno > 31
8279      || count != 32 - (int) src_regno)
8280    return 0;
8281
8282  if (legitimate_indirect_address_p (dest_addr, 0))
8283    {
8284      offset = 0;
8285      base_regno = REGNO (dest_addr);
8286      if (base_regno == 0)
8287	return 0;
8288    }
8289  else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8290    {
8291      offset = INTVAL (XEXP (dest_addr, 1));
8292      base_regno = REGNO (XEXP (dest_addr, 0));
8293    }
8294  else
8295    return 0;
8296
8297  for (i = 0; i < count; i++)
8298    {
8299      rtx elt = XVECEXP (op, 0, i);
8300      rtx newaddr;
8301      rtx addr_reg;
8302      HOST_WIDE_INT newoffset;
8303
8304      if (GET_CODE (elt) != SET
8305	  || GET_CODE (SET_SRC (elt)) != REG
8306	  || GET_MODE (SET_SRC (elt)) != SImode
8307	  || REGNO (SET_SRC (elt)) != src_regno + i
8308	  || GET_CODE (SET_DEST (elt)) != MEM
8309	  || GET_MODE (SET_DEST (elt)) != SImode)
8310	return 0;
8311      newaddr = XEXP (SET_DEST (elt), 0);
8312      if (legitimate_indirect_address_p (newaddr, 0))
8313	{
8314	  newoffset = 0;
8315	  addr_reg = newaddr;
8316	}
8317      else if (legitimate_offset_address_p (SImode, newaddr, 0))
8318	{
8319	  addr_reg = XEXP (newaddr, 0);
8320	  newoffset = INTVAL (XEXP (newaddr, 1));
8321	}
8322      else
8323	return 0;
8324      if (REGNO (addr_reg) != base_regno
8325	  || newoffset != offset + 4 * i)
8326	return 0;
8327    }
8328
8329  return 1;
8330}
8331
8332/* A validation routine: say whether CODE, a condition code, and MODE
8333   match.  The other alternatives either don't make sense or should
8334   never be generated.  */
8335
8336static void
8337validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8338{
8339  if (GET_RTX_CLASS (code) != '<'
8340      || GET_MODE_CLASS (mode) != MODE_CC)
8341    abort ();
8342
8343  /* These don't make sense.  */
8344  if ((code == GT || code == LT || code == GE || code == LE)
8345      && mode == CCUNSmode)
8346    abort ();
8347
8348  if ((code == GTU || code == LTU || code == GEU || code == LEU)
8349      && mode != CCUNSmode)
8350    abort ();
8351
8352  if (mode != CCFPmode
8353      && (code == ORDERED || code == UNORDERED
8354	  || code == UNEQ || code == LTGT
8355	  || code == UNGT || code == UNLT
8356	  || code == UNGE || code == UNLE))
8357    abort ();
8358
8359  /* These should never be generated except for
8360     flag_finite_math_only.  */
8361  if (mode == CCFPmode
8362      && ! flag_finite_math_only
8363      && (code == LE || code == GE
8364	  || code == UNEQ || code == LTGT
8365	  || code == UNGT || code == UNLT))
8366    abort ();
8367
8368  /* These are invalid; the information is not there.  */
8369  if (mode == CCEQmode
8370      && code != EQ && code != NE)
8371    abort ();
8372}
8373
8374/* Return 1 if OP is a comparison operation that is valid for a branch insn.
8375   We only check the opcode against the mode of the CC value here.  */
8376
8377int
8378branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8379{
8380  enum rtx_code code = GET_CODE (op);
8381  enum machine_mode cc_mode;
8382
8383  if (GET_RTX_CLASS (code) != '<')
8384    return 0;
8385
8386  cc_mode = GET_MODE (XEXP (op, 0));
8387  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8388    return 0;
8389
8390  validate_condition_mode (code, cc_mode);
8391
8392  return 1;
8393}
8394
8395/* Return 1 if OP is a comparison operation that is valid for a branch
8396   insn and which is true if the corresponding bit in the CC register
8397   is set.  */
8398
8399int
8400branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8401{
8402  enum rtx_code code;
8403
8404  if (! branch_comparison_operator (op, mode))
8405    return 0;
8406
8407  code = GET_CODE (op);
8408  return (code == EQ || code == LT || code == GT
8409	  || code == LTU || code == GTU
8410	  || code == UNORDERED);
8411}
8412
8413/* Return 1 if OP is a comparison operation that is valid for an scc
8414   insn: it must be a positive comparison.  */
8415
int
scc_comparison_operator (rtx op, enum machine_mode mode)
{
  /* An scc condition is exactly a positive branch comparison;
     delegate the check.  */
  return branch_positive_comparison_operator (op, mode);
}
8421
8422int
8423trap_comparison_operator (rtx op, enum machine_mode mode)
8424{
8425  if (mode != VOIDmode && mode != GET_MODE (op))
8426    return 0;
8427  return GET_RTX_CLASS (GET_CODE (op)) == '<';
8428}
8429
8430int
8431boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8432{
8433  enum rtx_code code = GET_CODE (op);
8434  return (code == AND || code == IOR || code == XOR);
8435}
8436
8437int
8438boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8439{
8440  enum rtx_code code = GET_CODE (op);
8441  return (code == IOR || code == XOR);
8442}
8443
8444int
8445min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8446{
8447  enum rtx_code code = GET_CODE (op);
8448  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8449}
8450
8451/* Return 1 if ANDOP is a mask that has no bits on that are not in the
8452   mask required to convert the result of a rotate insn into a shift
8453   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
8454
8455int
8456includes_lshift_p (rtx shiftop, rtx andop)
8457{
8458  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8459
8460  shift_mask <<= INTVAL (shiftop);
8461
8462  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8463}
8464
8465/* Similar, but for right shift.  */
8466
8467int
8468includes_rshift_p (rtx shiftop, rtx andop)
8469{
8470  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8471
8472  shift_mask >>= INTVAL (shiftop);
8473
8474  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8475}
8476
8477/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8478   to perform a left shift.  It must have exactly SHIFTOP least
8479   significant 0's, then one or more 1's, then zero or more 0's.  */
8480
int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks never have the required shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The constant arrives split into LOW/HIGH host words.  HIGH is
	 only read under HOST_BITS_PER_WIDE_INT < 64, matching its
	 conditional initialization below (the guard is a compile-time
	 constant).  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high word; run the same
	     transition check there.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      /* As in the CONST_INT case: lowest set bit must match the shift
	 mask's lowest bit...  */
      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* ...then invert and strip the low run, and everything that
	 remains (across both words) must be a single high run of
	 ones.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
8569
8570/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8571   to perform a left shift.  It must have SHIFTOP or more least
8572   significant 0's, with the remainder of the word 1's.  */
8573
int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      /* shift_mask has 1's at bit INTVAL (shiftop) and above,
	 0's below (arithmetic shift of all-ones).  */
      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.
	 -lsb has every bit from the lsb position upward set, so the
	 equality holds exactly when C is one contiguous run of 1's
	 reaching the top bit; lsb != 1 excludes the all-ones mask.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit mask on a host whose HOST_WIDE_INT may be only 32 bits:
	 the constant arrives split into low and high words.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask lies entirely in the high word; shifts of 32 or
		 less leave the whole high word in range, larger shifts
		 are applied relative to the high word.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* With a nonzero low word the run of 1's must extend through
	     the entire high word, otherwise the mask is not contiguous
	     to bit 63.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      /* Same single-run-of-1's test as the CONST_INT case, applied to
	 the low word.  */
      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
8639
8640/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8641   for lfq and stfq insns.
8642
8643   Note reg1 and reg2 *must* be hard registers.  To be sure we will
8644   abort if we are passed pseudo registers.  */
8645
8646int
8647registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8648{
8649  /* We might have been passed a SUBREG.  */
8650  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8651    return 0;
8652
8653  return (REGNO (reg1) == REGNO (reg2) - 1);
8654}
8655
8656/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8657   addr1 and addr2 must be in consecutive memory locations
8658   (addr2 == addr1 + 8).  */
8659
8660int
8661addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8662{
8663  unsigned int reg1;
8664  int offset1;
8665
8666  /* Extract an offset (if used) from the first addr.  */
8667  if (GET_CODE (addr1) == PLUS)
8668    {
8669      /* If not a REG, return zero.  */
8670      if (GET_CODE (XEXP (addr1, 0)) != REG)
8671	return 0;
8672      else
8673	{
8674          reg1 = REGNO (XEXP (addr1, 0));
8675	  /* The offset must be constant!  */
8676	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8677            return 0;
8678          offset1 = INTVAL (XEXP (addr1, 1));
8679	}
8680    }
8681  else if (GET_CODE (addr1) != REG)
8682    return 0;
8683  else
8684    {
8685      reg1 = REGNO (addr1);
8686      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
8687      offset1 = 0;
8688    }
8689
8690  /* Make sure the second address is a (mem (plus (reg) (const_int)))
8691      or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8692      register as addr1.  */
8693  if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8694   return 1;
8695  if (GET_CODE (addr2) != PLUS)
8696    return 0;
8697
8698  if (GET_CODE (XEXP (addr2, 0)) != REG
8699      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8700    return 0;
8701
8702  if (reg1 != REGNO (XEXP (addr2, 0)))
8703    return 0;
8704
8705  /* The offset for the second addr must be 8 more than the first addr.  */
8706  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8707    return 0;
8708
8709  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
8710     instructions.  */
8711  return 1;
8712}
8713
8714/* Return the register class of a scratch register needed to copy IN into
8715   or out of a register in CLASS in MODE.  If it can be done directly,
8716   NO_REGS is returned.  */
8717
8718enum reg_class
8719secondary_reload_class (enum reg_class class,
8720			enum machine_mode mode,
8721			rtx in)
8722{
8723  int regno;
8724
8725  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8726#if TARGET_MACHO
8727                    && MACHOPIC_INDIRECT
8728#endif
8729                    ))
8730    {
8731      /* We cannot copy a symbolic operand directly into anything
8732         other than BASE_REGS for TARGET_ELF.  So indicate that a
8733         register from BASE_REGS is needed as an intermediate
8734         register.
8735
8736	 On Darwin, pic addresses require a load from memory, which
8737	 needs a base register.  */
8738      if (class != BASE_REGS
8739          && (GET_CODE (in) == SYMBOL_REF
8740              || GET_CODE (in) == HIGH
8741              || GET_CODE (in) == LABEL_REF
8742              || GET_CODE (in) == CONST))
8743        return BASE_REGS;
8744    }
8745
8746  if (GET_CODE (in) == REG)
8747    {
8748      regno = REGNO (in);
8749      if (regno >= FIRST_PSEUDO_REGISTER)
8750	{
8751	  regno = true_regnum (in);
8752	  if (regno >= FIRST_PSEUDO_REGISTER)
8753	    regno = -1;
8754	}
8755    }
8756  else if (GET_CODE (in) == SUBREG)
8757    {
8758      regno = true_regnum (in);
8759      if (regno >= FIRST_PSEUDO_REGISTER)
8760	regno = -1;
8761    }
8762  else
8763    regno = -1;
8764
8765  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8766     into anything.  */
8767  if (class == GENERAL_REGS || class == BASE_REGS
8768      || (regno >= 0 && INT_REGNO_P (regno)))
8769    return NO_REGS;
8770
8771  /* Constants, memory, and FP registers can go into FP registers.  */
8772  if ((regno == -1 || FP_REGNO_P (regno))
8773      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8774    return NO_REGS;
8775
8776  /* Memory, and AltiVec registers can go into AltiVec registers.  */
8777  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8778      && class == ALTIVEC_REGS)
8779    return NO_REGS;
8780
8781  /* We can copy among the CR registers.  */
8782  if ((class == CR_REGS || class == CR0_REGS)
8783      && regno >= 0 && CR_REGNO_P (regno))
8784    return NO_REGS;
8785
8786  /* Otherwise, we need GENERAL_REGS.  */
8787  return GENERAL_REGS;
8788}
8789
8790/* Given a comparison operation, return the bit number in CCR to test.  We
8791   know this is a valid comparison.
8792
8793   SCC_P is 1 if this is for an scc.  That means that %D will have been
8794   used instead of %C, so the bits will be in different places.
8795
8796   Return -1 if OP isn't a valid comparison for some reason.  */
8797
8798int
8799ccr_bit (rtx op, int scc_p)
8800{
8801  enum rtx_code code = GET_CODE (op);
8802  enum machine_mode cc_mode;
8803  int cc_regnum;
8804  int base_bit;
8805  rtx reg;
8806
8807  if (GET_RTX_CLASS (code) != '<')
8808    return -1;
8809
8810  reg = XEXP (op, 0);
8811
8812  if (GET_CODE (reg) != REG
8813      || ! CR_REGNO_P (REGNO (reg)))
8814    abort ();
8815
8816  cc_mode = GET_MODE (reg);
8817  cc_regnum = REGNO (reg);
8818  base_bit = 4 * (cc_regnum - CR0_REGNO);
8819
8820  validate_condition_mode (code, cc_mode);
8821
8822  /* When generating a sCOND operation, only positive conditions are
8823     allowed.  */
8824  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8825      && code != GTU && code != LTU)
8826    abort ();
8827
8828  switch (code)
8829    {
8830    case NE:
8831      return scc_p ? base_bit + 3 : base_bit + 2;
8832    case EQ:
8833      return base_bit + 2;
8834    case GT:  case GTU:  case UNLE:
8835      return base_bit + 1;
8836    case LT:  case LTU:  case UNGE:
8837      return base_bit;
8838    case ORDERED:  case UNORDERED:
8839      return base_bit + 3;
8840
8841    case GE:  case GEU:
8842      /* If scc, we will have done a cror to put the bit in the
8843	 unordered position.  So test that bit.  For integer, this is ! LT
8844	 unless this is an scc insn.  */
8845      return scc_p ? base_bit + 3 : base_bit;
8846
8847    case LE:  case LEU:
8848      return scc_p ? base_bit + 3 : base_bit + 1;
8849
8850    default:
8851      abort ();
8852    }
8853}
8854
8855/* Return the GOT register.  */
8856
struct rtx_def *
rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Mark the current function as using the PIC offset table.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
8870
8871/* Function to init struct machine_function.
8872   This will be called, via a pointer variable,
8873   from push_function_context.  */
8874
static struct machine_function *
rs6000_init_machine_status (void)
{
  /* Return a zero-initialized machine_function from GC-managed
     memory; installed as the init_machine_status hook.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
8881/* These macros test for integers and extract the low-order bits.  */
8882#define INT_P(X)  \
8883((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
8884 && GET_MODE (X) == VOIDmode)
8885
8886#define INT_LOWPART(X) \
8887  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8888
8889int
8890extract_MB (rtx op)
8891{
8892  int i;
8893  unsigned long val = INT_LOWPART (op);
8894
8895  /* If the high bit is zero, the value is the first 1 bit we find
8896     from the left.  */
8897  if ((val & 0x80000000) == 0)
8898    {
8899      if ((val & 0xffffffff) == 0)
8900	abort ();
8901
8902      i = 1;
8903      while (((val <<= 1) & 0x80000000) == 0)
8904	++i;
8905      return i;
8906    }
8907
8908  /* If the high bit is set and the low bit is not, or the mask is all
8909     1's, the value is zero.  */
8910  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8911    return 0;
8912
8913  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
8914     from the right.  */
8915  i = 31;
8916  while (((val >>= 1) & 1) != 0)
8917    --i;
8918
8919  return i;
8920}
8921
8922int
8923extract_ME (rtx op)
8924{
8925  int i;
8926  unsigned long val = INT_LOWPART (op);
8927
8928  /* If the low bit is zero, the value is the first 1 bit we find from
8929     the right.  */
8930  if ((val & 1) == 0)
8931    {
8932      if ((val & 0xffffffff) == 0)
8933	abort ();
8934
8935      i = 30;
8936      while (((val >>= 1) & 1) == 0)
8937	--i;
8938
8939      return i;
8940    }
8941
8942  /* If the low bit is set and the high bit is not, or the mask is all
8943     1's, the value is 31.  */
8944  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8945    return 31;
8946
8947  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
8948     from the left.  */
8949  i = 0;
8950  while (((val <<= 1) & 0x80000000) != 0)
8951    ++i;
8952
8953  return i;
8954}
8955
8956/* Locate some local-dynamic symbol still in use by this function
8957   so that we can print its name in some tls_ld pattern.  */
8958
static const char *
rs6000_get_some_local_dynamic_name (void)
{
  rtx insn;

  /* Use the cached name if an earlier call already found one.  */
  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  /* Walk every insn pattern; the helper stores the first
     local-dynamic TLS symbol name it sees into
     cfun->machine->some_ld_name and returns nonzero.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn),
			 rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  /* Callers only ask when such a symbol must exist, so reaching
     here is an internal error.  */
  abort ();
}
8975
8976/* Helper function for rs6000_get_some_local_dynamic_name.  */
8977
8978static int
8979rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8980{
8981  rtx x = *px;
8982
8983  if (GET_CODE (x) == SYMBOL_REF)
8984    {
8985      const char *str = XSTR (x, 0);
8986      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8987	{
8988	  cfun->machine->some_ld_name = str;
8989	  return 1;
8990	}
8991    }
8992
8993  return 0;
8994}
8995
8996/* Print an operand.  Recognize special options, documented below.  */
8997
8998#if TARGET_ELF
8999#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9000#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9001#else
9002#define SMALL_DATA_RELOC "sda21"
9003#define SMALL_DATA_REG 0
9004#endif
9005
9006void
9007print_operand (FILE *file, rtx x, int code)
9008{
9009  int i;
9010  HOST_WIDE_INT val;
9011  unsigned HOST_WIDE_INT uval;
9012
9013  switch (code)
9014    {
9015    case '.':
9016      /* Write out an instruction after the call which may be replaced
9017	 with glue code by the loader.  This depends on the AIX version.  */
9018      asm_fprintf (file, RS6000_CALL_GLUE);
9019      return;
9020
9021      /* %a is output_address.  */
9022
9023    case 'A':
9024      /* If X is a constant integer whose low-order 5 bits are zero,
9025	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
9026	 in the AIX assembler where "sri" with a zero shift count
9027	 writes a trash instruction.  */
9028      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9029	putc ('l', file);
9030      else
9031	putc ('r', file);
9032      return;
9033
9034    case 'b':
9035      /* If constant, low-order 16 bits of constant, unsigned.
9036	 Otherwise, write normally.  */
9037      if (INT_P (x))
9038	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9039      else
9040	print_operand (file, x, 0);
9041      return;
9042
9043    case 'B':
9044      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9045	 for 64-bit mask direction.  */
9046      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9047      return;
9048
9049      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9050	 output_operand.  */
9051
9052    case 'c':
9053      /* X is a CR register.  Print the number of the GT bit of the CR.  */
9054      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9055       output_operand_lossage ("invalid %%E value");
9056      else
9057       fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9058      return;
9059
9060    case 'D':
9061      /* Like 'J' but get to the EQ bit.  */
9062      if (GET_CODE (x) != REG)
9063       abort ();
9064
9065      /* Bit 1 is EQ bit.  */
9066      i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9067
9068      /* If we want bit 31, write a shift count of zero, not 32.  */
9069      fprintf (file, "%d", i == 31 ? 0 : i + 1);
9070      return;
9071
9072    case 'E':
9073      /* X is a CR register.  Print the number of the EQ bit of the CR */
9074      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9075	output_operand_lossage ("invalid %%E value");
9076      else
9077	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9078      return;
9079
9080    case 'f':
9081      /* X is a CR register.  Print the shift count needed to move it
9082	 to the high-order four bits.  */
9083      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9084	output_operand_lossage ("invalid %%f value");
9085      else
9086	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9087      return;
9088
9089    case 'F':
9090      /* Similar, but print the count for the rotate in the opposite
9091	 direction.  */
9092      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9093	output_operand_lossage ("invalid %%F value");
9094      else
9095	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9096      return;
9097
9098    case 'G':
9099      /* X is a constant integer.  If it is negative, print "m",
9100	 otherwise print "z".  This is to make an aze or ame insn.  */
9101      if (GET_CODE (x) != CONST_INT)
9102	output_operand_lossage ("invalid %%G value");
9103      else if (INTVAL (x) >= 0)
9104	putc ('z', file);
9105      else
9106	putc ('m', file);
9107      return;
9108
9109    case 'h':
9110      /* If constant, output low-order five bits.  Otherwise, write
9111	 normally.  */
9112      if (INT_P (x))
9113	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9114      else
9115	print_operand (file, x, 0);
9116      return;
9117
9118    case 'H':
9119      /* If constant, output low-order six bits.  Otherwise, write
9120	 normally.  */
9121      if (INT_P (x))
9122	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9123      else
9124	print_operand (file, x, 0);
9125      return;
9126
9127    case 'I':
9128      /* Print `i' if this is a constant, else nothing.  */
9129      if (INT_P (x))
9130	putc ('i', file);
9131      return;
9132
9133    case 'j':
9134      /* Write the bit number in CCR for jump.  */
9135      i = ccr_bit (x, 0);
9136      if (i == -1)
9137	output_operand_lossage ("invalid %%j code");
9138      else
9139	fprintf (file, "%d", i);
9140      return;
9141
9142    case 'J':
9143      /* Similar, but add one for shift count in rlinm for scc and pass
9144	 scc flag to `ccr_bit'.  */
9145      i = ccr_bit (x, 1);
9146      if (i == -1)
9147	output_operand_lossage ("invalid %%J code");
9148      else
9149	/* If we want bit 31, write a shift count of zero, not 32.  */
9150	fprintf (file, "%d", i == 31 ? 0 : i + 1);
9151      return;
9152
9153    case 'k':
9154      /* X must be a constant.  Write the 1's complement of the
9155	 constant.  */
9156      if (! INT_P (x))
9157	output_operand_lossage ("invalid %%k value");
9158      else
9159	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9160      return;
9161
9162    case 'K':
9163      /* X must be a symbolic constant on ELF.  Write an
9164	 expression suitable for an 'addi' that adds in the low 16
9165	 bits of the MEM.  */
9166      if (GET_CODE (x) != CONST)
9167	{
9168	  print_operand_address (file, x);
9169	  fputs ("@l", file);
9170	}
9171      else
9172	{
9173	  if (GET_CODE (XEXP (x, 0)) != PLUS
9174	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9175		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9176	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9177	    output_operand_lossage ("invalid %%K value");
9178	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
9179	  fputs ("@l", file);
9180	  /* For GNU as, there must be a non-alphanumeric character
9181	     between 'l' and the number.  The '-' is added by
9182	     print_operand() already.  */
9183	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9184	    fputs ("+", file);
9185	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9186	}
9187      return;
9188
9189      /* %l is output_asm_label.  */
9190
9191    case 'L':
9192      /* Write second word of DImode or DFmode reference.  Works on register
9193	 or non-indexed memory only.  */
9194      if (GET_CODE (x) == REG)
9195	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9196      else if (GET_CODE (x) == MEM)
9197	{
9198	  /* Handle possible auto-increment.  Since it is pre-increment and
9199	     we have already done it, we can just use an offset of word.  */
9200	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9201	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9202	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9203					   UNITS_PER_WORD));
9204	  else
9205	    output_address (XEXP (adjust_address_nv (x, SImode,
9206						     UNITS_PER_WORD),
9207				  0));
9208
9209	  if (small_data_operand (x, GET_MODE (x)))
9210	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9211		     reg_names[SMALL_DATA_REG]);
9212	}
9213      return;
9214
9215    case 'm':
9216      /* MB value for a mask operand.  */
9217      if (! mask_operand (x, SImode))
9218	output_operand_lossage ("invalid %%m value");
9219
9220      fprintf (file, "%d", extract_MB (x));
9221      return;
9222
9223    case 'M':
9224      /* ME value for a mask operand.  */
9225      if (! mask_operand (x, SImode))
9226	output_operand_lossage ("invalid %%M value");
9227
9228      fprintf (file, "%d", extract_ME (x));
9229      return;
9230
9231      /* %n outputs the negative of its operand.  */
9232
9233    case 'N':
9234      /* Write the number of elements in the vector times 4.  */
9235      if (GET_CODE (x) != PARALLEL)
9236	output_operand_lossage ("invalid %%N value");
9237      else
9238	fprintf (file, "%d", XVECLEN (x, 0) * 4);
9239      return;
9240
9241    case 'O':
9242      /* Similar, but subtract 1 first.  */
9243      if (GET_CODE (x) != PARALLEL)
9244	output_operand_lossage ("invalid %%O value");
9245      else
9246	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9247      return;
9248
9249    case 'p':
9250      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
9251      if (! INT_P (x)
9252	  || INT_LOWPART (x) < 0
9253	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
9254	output_operand_lossage ("invalid %%p value");
9255      else
9256	fprintf (file, "%d", i);
9257      return;
9258
9259    case 'P':
9260      /* The operand must be an indirect memory reference.  The result
9261	 is the register name.  */
9262      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9263	  || REGNO (XEXP (x, 0)) >= 32)
9264	output_operand_lossage ("invalid %%P value");
9265      else
9266	fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9267      return;
9268
9269    case 'q':
9270      /* This outputs the logical code corresponding to a boolean
9271	 expression.  The expression may have one or both operands
9272	 negated (if one, only the first one).  For condition register
9273         logical operations, it will also treat the negated
9274         CR codes as NOTs, but not handle NOTs of them.  */
9275      {
9276	const char *const *t = 0;
9277	const char *s;
9278	enum rtx_code code = GET_CODE (x);
9279	static const char * const tbl[3][3] = {
9280	  { "and", "andc", "nor" },
9281	  { "or", "orc", "nand" },
9282	  { "xor", "eqv", "xor" } };
9283
9284	if (code == AND)
9285	  t = tbl[0];
9286	else if (code == IOR)
9287	  t = tbl[1];
9288	else if (code == XOR)
9289	  t = tbl[2];
9290	else
9291	  output_operand_lossage ("invalid %%q value");
9292
9293	if (GET_CODE (XEXP (x, 0)) != NOT)
9294	  s = t[0];
9295	else
9296	  {
9297	    if (GET_CODE (XEXP (x, 1)) == NOT)
9298	      s = t[2];
9299	    else
9300	      s = t[1];
9301	  }
9302
9303	fputs (s, file);
9304      }
9305      return;
9306
9307    case 'Q':
9308      if (TARGET_MFCRF)
9309	fputc (',',file);
9310        /* FALLTHRU */
9311      else
9312	return;
9313
9314    case 'R':
9315      /* X is a CR register.  Print the mask for `mtcrf'.  */
9316      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9317	output_operand_lossage ("invalid %%R value");
9318      else
9319	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9320      return;
9321
9322    case 's':
9323      /* Low 5 bits of 32 - value */
9324      if (! INT_P (x))
9325	output_operand_lossage ("invalid %%s value");
9326      else
9327	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9328      return;
9329
9330    case 'S':
9331      /* PowerPC64 mask position.  All 0's is excluded.
9332	 CONST_INT 32-bit mask is considered sign-extended so any
9333	 transition must occur within the CONST_INT, not on the boundary.  */
9334      if (! mask64_operand (x, DImode))
9335	output_operand_lossage ("invalid %%S value");
9336
9337      uval = INT_LOWPART (x);
9338
9339      if (uval & 1)	/* Clear Left */
9340	{
9341#if HOST_BITS_PER_WIDE_INT > 64
9342	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9343#endif
9344	  i = 64;
9345	}
9346      else		/* Clear Right */
9347	{
9348	  uval = ~uval;
9349#if HOST_BITS_PER_WIDE_INT > 64
9350	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9351#endif
9352	  i = 63;
9353	}
9354      while (uval != 0)
9355	--i, uval >>= 1;
9356      if (i < 0)
9357	abort ();
9358      fprintf (file, "%d", i);
9359      return;
9360
9361    case 't':
9362      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
9363      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9364	abort ();
9365
9366      /* Bit 3 is OV bit.  */
9367      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9368
9369      /* If we want bit 31, write a shift count of zero, not 32.  */
9370      fprintf (file, "%d", i == 31 ? 0 : i + 1);
9371      return;
9372
9373    case 'T':
9374      /* Print the symbolic name of a branch target register.  */
9375      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9376				  && REGNO (x) != COUNT_REGISTER_REGNUM))
9377	output_operand_lossage ("invalid %%T value");
9378      else if (REGNO (x) == LINK_REGISTER_REGNUM)
9379	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9380      else
9381	fputs ("ctr", file);
9382      return;
9383
9384    case 'u':
9385      /* High-order 16 bits of constant for use in unsigned operand.  */
9386      if (! INT_P (x))
9387	output_operand_lossage ("invalid %%u value");
9388      else
9389	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9390		 (INT_LOWPART (x) >> 16) & 0xffff);
9391      return;
9392
9393    case 'v':
9394      /* High-order 16 bits of constant for use in signed operand.  */
9395      if (! INT_P (x))
9396	output_operand_lossage ("invalid %%v value");
9397      else
9398	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9399		 (INT_LOWPART (x) >> 16) & 0xffff);
9400      return;
9401
9402    case 'U':
9403      /* Print `u' if this has an auto-increment or auto-decrement.  */
9404      if (GET_CODE (x) == MEM
9405	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
9406	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9407	putc ('u', file);
9408      return;
9409
9410    case 'V':
9411      /* Print the trap code for this operand.  */
9412      switch (GET_CODE (x))
9413	{
9414	case EQ:
9415	  fputs ("eq", file);   /* 4 */
9416	  break;
9417	case NE:
9418	  fputs ("ne", file);   /* 24 */
9419	  break;
9420	case LT:
9421	  fputs ("lt", file);   /* 16 */
9422	  break;
9423	case LE:
9424	  fputs ("le", file);   /* 20 */
9425	  break;
9426	case GT:
9427	  fputs ("gt", file);   /* 8 */
9428	  break;
9429	case GE:
9430	  fputs ("ge", file);   /* 12 */
9431	  break;
9432	case LTU:
9433	  fputs ("llt", file);  /* 2 */
9434	  break;
9435	case LEU:
9436	  fputs ("lle", file);  /* 6 */
9437	  break;
9438	case GTU:
9439	  fputs ("lgt", file);  /* 1 */
9440	  break;
9441	case GEU:
9442	  fputs ("lge", file);  /* 5 */
9443	  break;
9444	default:
9445	  abort ();
9446	}
9447      break;
9448
9449    case 'w':
9450      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
9451	 normally.  */
9452      if (INT_P (x))
9453	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9454		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9455      else
9456	print_operand (file, x, 0);
9457      return;
9458
9459    case 'W':
9460      /* MB value for a PowerPC64 rldic operand.  */
9461      val = (GET_CODE (x) == CONST_INT
9462	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9463
9464      if (val < 0)
9465	i = -1;
9466      else
9467	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9468	  if ((val <<= 1) < 0)
9469	    break;
9470
9471#if HOST_BITS_PER_WIDE_INT == 32
9472      if (GET_CODE (x) == CONST_INT && i >= 0)
9473	i += 32;  /* zero-extend high-part was all 0's */
9474      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9475	{
9476	  val = CONST_DOUBLE_LOW (x);
9477
9478	  if (val == 0)
9479	    abort ();
9480	  else if (val < 0)
9481	    --i;
9482	  else
9483	    for ( ; i < 64; i++)
9484	      if ((val <<= 1) < 0)
9485		break;
9486	}
9487#endif
9488
9489      fprintf (file, "%d", i + 1);
9490      return;
9491
9492    case 'X':
9493      if (GET_CODE (x) == MEM
9494	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
9495	putc ('x', file);
9496      return;
9497
9498    case 'Y':
9499      /* Like 'L', for third word of TImode  */
9500      if (GET_CODE (x) == REG)
9501	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9502      else if (GET_CODE (x) == MEM)
9503	{
9504	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9505	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9506	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9507	  else
9508	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9509	  if (small_data_operand (x, GET_MODE (x)))
9510	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9511		     reg_names[SMALL_DATA_REG]);
9512	}
9513      return;
9514
9515    case 'z':
9516      /* X is a SYMBOL_REF.  Write out the name preceded by a
9517	 period and without any trailing data in brackets.  Used for function
9518	 names.  If we are configured for System V (or the embedded ABI) on
9519	 the PowerPC, do not emit the period, since those systems do not use
9520	 TOCs and the like.  */
9521      if (GET_CODE (x) != SYMBOL_REF)
9522	abort ();
9523
9524      if (XSTR (x, 0)[0] != '.')
9525	{
9526	  switch (DEFAULT_ABI)
9527	    {
9528	    default:
9529	      abort ();
9530
9531	    case ABI_AIX:
9532	      putc ('.', file);
9533	      break;
9534
9535	    case ABI_V4:
9536	    case ABI_DARWIN:
9537	      break;
9538	    }
9539	}
9540      if (TARGET_AIX)
9541	RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9542      else
9543	assemble_name (file, XSTR (x, 0));
9544      return;
9545
9546    case 'Z':
9547      /* Like 'L', for last word of TImode.  */
9548      if (GET_CODE (x) == REG)
9549	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9550      else if (GET_CODE (x) == MEM)
9551	{
9552	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
9553	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9554	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9555	  else
9556	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9557	  if (small_data_operand (x, GET_MODE (x)))
9558	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9559		     reg_names[SMALL_DATA_REG]);
9560	}
9561      return;
9562
9563      /* Print AltiVec or SPE memory operand.  */
9564    case 'y':
9565      {
9566	rtx tmp;
9567
9568	if (GET_CODE (x) != MEM)
9569	  abort ();
9570
9571	tmp = XEXP (x, 0);
9572
9573	if (TARGET_E500)
9574	  {
9575	    /* Handle [reg].  */
9576	    if (GET_CODE (tmp) == REG)
9577	      {
9578		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9579		break;
9580	      }
9581	    /* Handle [reg+UIMM].  */
9582	    else if (GET_CODE (tmp) == PLUS &&
9583		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9584	      {
9585		int x;
9586
9587		if (GET_CODE (XEXP (tmp, 0)) != REG)
9588		  abort ();
9589
9590		x = INTVAL (XEXP (tmp, 1));
9591		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9592		break;
9593	      }
9594
9595	    /* Fall through.  Must be [reg+reg].  */
9596	  }
9597	if (GET_CODE (tmp) == REG)
9598	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9599	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9600	  {
9601	    if (REGNO (XEXP (tmp, 0)) == 0)
9602	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9603		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
9604	    else
9605	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9606		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
9607	  }
9608	else
9609	  abort ();
9610	break;
9611      }
9612
9613    case 0:
9614      if (GET_CODE (x) == REG)
9615	fprintf (file, "%s", reg_names[REGNO (x)]);
9616      else if (GET_CODE (x) == MEM)
9617	{
9618	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
9619	     know the width from the mode.  */
9620	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9621	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9622		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9623	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9624	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9625		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9626	  else
9627	    output_address (XEXP (x, 0));
9628	}
9629      else
9630	output_addr_const (file, x);
9631      return;
9632
9633    case '&':
9634      assemble_name (file, rs6000_get_some_local_dynamic_name ());
9635      return;
9636
9637    default:
9638      output_operand_lossage ("invalid %%xn code");
9639    }
9640}
9641
/* Print the address of an operand.  X is the address rtx; emit its
   assembler form to FILE.  Aborts on address forms this port does not
   print.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    /* Plain register indirect: "0(rN)".  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data symbols are addressed off the small-data base
	   register.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* A bare symbol should not reach here when using a TOC.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Register+register.  Print the r0 operand second if present;
	 in the RA position r0 would presumably be read as the literal
	 value zero (standard PowerPC RA=0 semantics) -- the swap
	 avoids that.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    /* Register+displacement: "D(rN)".  */
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Low part of a symbol, ELF spelling: "sym@l(rN)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Same thing in Darwin assembler syntax: "lo16(sym)(rN)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* For ELF, additionally append "@toc" to the symbol
		 name while it is being printed.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  /* Undo both temporary rewrites made above.  */
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
9727
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true when the value was emitted here; otherwise
   falls back to default_assemble_integer.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      /* Nonzero while a fixup entry is being emitted, so the fixup
	 path below is not re-entered.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit an internal label naming this word, then the word
	     itself with an @fixup relocation...  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* ...and record the label's address in the .fixup section,
	     returning to the previous section afterwards.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
9788
9789#ifdef HAVE_GAS_HIDDEN
9790/* Emit an assembler directive to set symbol visibility for DECL to
9791   VISIBILITY_TYPE.  */
9792
9793static void
9794rs6000_assemble_visibility (tree decl, int vis)
9795{
9796  /* Functions need to have their entry point symbol visibility set as
9797     well as their descriptor symbol visibility.  */
9798  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9799    {
9800      static const char * const visibility_types[] = {
9801        NULL, "internal", "hidden", "protected"
9802      };
9803
9804      const char *name, *type;
9805
9806      name = ((* targetm.strip_name_encoding)
9807	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9808      type = visibility_types[vis];
9809
9810      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9811      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9812    }
9813  else
9814    default_assemble_visibility (decl, vis);
9815}
9816#endif
9817
9818enum rtx_code
9819rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9820{
9821  /* Reversal of FP compares takes care -- an ordered compare
9822     becomes an unordered compare and vice versa.  */
9823  if (mode == CCFPmode
9824      && (!flag_finite_math_only
9825	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9826	  || code == UNEQ || code == LTGT))
9827    return reverse_condition_maybe_unordered (code);
9828  else
9829    return reverse_condition (code);
9830}
9831
9832/* Generate a compare for CODE.  Return a brand-new rtx that
9833   represents the result of the compare.  */
9834
9835static rtx
9836rs6000_generate_compare (enum rtx_code code)
9837{
9838  enum machine_mode comp_mode;
9839  rtx compare_result;
9840
9841  if (rs6000_compare_fp_p)
9842    comp_mode = CCFPmode;
9843  else if (code == GTU || code == LTU
9844	  || code == GEU || code == LEU)
9845    comp_mode = CCUNSmode;
9846  else
9847    comp_mode = CCmode;
9848
9849  /* First, the compare.  */
9850  compare_result = gen_reg_rtx (comp_mode);
9851
9852  /* SPE FP compare instructions on the GPRs.  Yuck!  */
9853  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9854      && rs6000_compare_fp_p)
9855    {
9856      rtx cmp, or1, or2, or_result, compare_result2;
9857
9858      /* Note: The E500 comparison instructions set the GT bit (x +
9859        1), on success.  This explains the mess.  */
9860
9861      switch (code)
9862	{
9863       case EQ: case UNEQ: case NE: case LTGT:
9864	  cmp = flag_finite_math_only
9865	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9866			       rs6000_compare_op1)
9867	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9868			       rs6000_compare_op1);
9869	  break;
9870       case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
9871	  cmp = flag_finite_math_only
9872	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9873			       rs6000_compare_op1)
9874	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9875			       rs6000_compare_op1);
9876	  break;
9877       case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
9878	  cmp = flag_finite_math_only
9879	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9880			       rs6000_compare_op1)
9881	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9882			       rs6000_compare_op1);
9883	  break;
9884	default:
9885	  abort ();
9886	}
9887
9888      /* Synthesize LE and GE from LT/GT || EQ.  */
9889      if (code == LE || code == GE || code == LEU || code == GEU)
9890	{
9891	  emit_insn (cmp);
9892
9893	  switch (code)
9894	    {
9895	    case LE: code = LT; break;
9896	    case GE: code = GT; break;
9897	    case LEU: code = LT; break;
9898	    case GEU: code = GT; break;
9899	    default: abort ();
9900	    }
9901
9902	  or1 = gen_reg_rtx (SImode);
9903	  or2 = gen_reg_rtx (SImode);
9904	  or_result = gen_reg_rtx (CCEQmode);
9905	  compare_result2 = gen_reg_rtx (CCFPmode);
9906
9907	  /* Do the EQ.  */
9908	  cmp = flag_finite_math_only
9909	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9910			       rs6000_compare_op1)
9911	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9912			       rs6000_compare_op1);
9913	  emit_insn (cmp);
9914
9915	  or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
9916	  or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
9917
9918	  /* OR them together.  */
9919	  cmp = gen_rtx_SET (VOIDmode, or_result,
9920			     gen_rtx_COMPARE (CCEQmode,
9921					      gen_rtx_IOR (SImode, or1, or2),
9922					      const_true_rtx));
9923	  compare_result = or_result;
9924	  code = EQ;
9925	}
9926      else
9927	{
9928	  if (code == NE || code == LTGT)
9929	    code = NE;
9930         else
9931           code = EQ;
9932	}
9933
9934      emit_insn (cmp);
9935    }
9936  else
9937    {
9938      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
9939         CLOBBERs to match cmptf_internal2 pattern.  */
9940      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
9941          && GET_MODE (rs6000_compare_op0) == TFmode
9942          && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
9943          && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
9944        emit_insn (gen_rtx_PARALLEL (VOIDmode,
9945          gen_rtvec (9,
9946		     gen_rtx_SET (VOIDmode,
9947				  compare_result,
9948				  gen_rtx_COMPARE (comp_mode,
9949						   rs6000_compare_op0,
9950						   rs6000_compare_op1)),
9951		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9952		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9953		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9954		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9955		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9956		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9957		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9958		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
9959      else
9960	emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9961				gen_rtx_COMPARE (comp_mode,
9962						 rs6000_compare_op0,
9963						 rs6000_compare_op1)));
9964    }
9965
9966  /* Some kinds of FP comparisons need an OR operation;
9967     under flag_finite_math_only we don't bother.  */
9968  if (rs6000_compare_fp_p
9969      && ! flag_finite_math_only
9970      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9971      && (code == LE || code == GE
9972	  || code == UNEQ || code == LTGT
9973	  || code == UNGT || code == UNLT))
9974    {
9975      enum rtx_code or1, or2;
9976      rtx or1_rtx, or2_rtx, compare2_rtx;
9977      rtx or_result = gen_reg_rtx (CCEQmode);
9978
9979      switch (code)
9980	{
9981	case LE: or1 = LT;  or2 = EQ;  break;
9982	case GE: or1 = GT;  or2 = EQ;  break;
9983	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
9984	case LTGT: or1 = LT;  or2 = GT;  break;
9985	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
9986	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
9987	default:  abort ();
9988	}
9989      validate_condition_mode (or1, comp_mode);
9990      validate_condition_mode (or2, comp_mode);
9991      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9992      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9993      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9994				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9995				      const_true_rtx);
9996      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9997
9998      compare_result = or_result;
9999      code = EQ;
10000    }
10001
10002  validate_condition_mode (code, GET_MODE (compare_result));
10003
10004  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
10005}
10006
10007
10008/* Emit the RTL for an sCOND pattern.  */
10009
10010void
10011rs6000_emit_sCOND (enum rtx_code code, rtx result)
10012{
10013  rtx condition_rtx;
10014  enum machine_mode op_mode;
10015  enum rtx_code cond_code;
10016
10017  condition_rtx = rs6000_generate_compare (code);
10018  cond_code = GET_CODE (condition_rtx);
10019
10020  if (TARGET_E500 && rs6000_compare_fp_p
10021      && !TARGET_FPRS && TARGET_HARD_FLOAT)
10022    {
10023      rtx t;
10024
10025      PUT_MODE (condition_rtx, SImode);
10026      t = XEXP (condition_rtx, 0);
10027
10028      if (cond_code != NE && cond_code != EQ)
10029       abort ();
10030
10031      if (cond_code == NE)
10032       emit_insn (gen_e500_flip_eq_bit (t, t));
10033
10034      emit_insn (gen_move_from_CR_eq_bit (result, t));
10035      return;
10036    }
10037
10038  if (cond_code == NE
10039      || cond_code == GE || cond_code == LE
10040      || cond_code == GEU || cond_code == LEU
10041      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10042    {
10043      rtx not_result = gen_reg_rtx (CCEQmode);
10044      rtx not_op, rev_cond_rtx;
10045      enum machine_mode cc_mode;
10046
10047      cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10048
10049      rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
10050			      SImode, XEXP (condition_rtx, 0), const0_rtx);
10051      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10052      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10053      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10054    }
10055
10056  op_mode = GET_MODE (rs6000_compare_op0);
10057  if (op_mode == VOIDmode)
10058    op_mode = GET_MODE (rs6000_compare_op1);
10059
10060  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10061    {
10062      PUT_MODE (condition_rtx, DImode);
10063      convert_move (result, condition_rtx, 0);
10064    }
10065  else
10066    {
10067      PUT_MODE (condition_rtx, SImode);
10068      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10069    }
10070}
10071
10072/* Emit a branch of kind CODE to location LOC.  */
10073
10074void
10075rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10076{
10077  rtx condition_rtx, loc_ref;
10078
10079  condition_rtx = rs6000_generate_compare (code);
10080  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10081  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10082			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10083						     loc_ref, pc_rtx)));
10084}
10085
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is only valid
   until the next call.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  /* Condition-register field number, relative to CR0.  */
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Insn length 8 means the target is out of conditional-branch
     range, so a reversed branch around an unconditional "b" is
     emitted (see the tail of this function).  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = GT;
      else if (code == NE)
       code = UNLE;
      else
	abort ();
    }

  /* Map CODE to the branch-condition mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (rs6000_always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
              pred = "+";
	  else
	    pred = "-";
	}
    }

  /* LABEL == NULL means a conditional return; otherwise a plain
     conditional branch.  The {old|new} braces select between the two
     mnemonic sets.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
10214
10215/* Return the string to flip the EQ bit on a CR.  */
10216char *
10217output_e500_flip_eq_bit (rtx dst, rtx src)
10218{
10219  static char string[64];
10220  int a, b;
10221
10222  if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10223      || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10224    abort ();
10225
10226  /* EQ bit.  */
10227  a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
10228  b = 4 * (REGNO (src) - CR0_REGNO) + 2;
10229
10230  sprintf (string, "crnot %d,%d", a, b);
10231  return string;
10232}
10233
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.
   The compare operands come from rs6000_compare_op0/op1.  */

int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      /* Integer conditional moves are only done via isel.  */
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
	   && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	/* Without NaNs, a > 0 is the negation of a <= 0, so test LE
	   with the arms swapped.  */
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a <= 0 <-> -a >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* a ordered <-> |a| >= 0 (false only for NaN).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a == 0 <-> -|a| >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* The final conditional move: DEST = (op0 >= 0 ? TRUE_COND
     : FALSE_COND), matching the fsel-style pattern.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
10403
10404/* Same as above, but for ints (isel).  */
10405
10406static int
10407rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10408{
10409  rtx condition_rtx, cr;
10410
10411  /* All isel implementations thus far are 32-bits.  */
10412  if (GET_MODE (rs6000_compare_op0) != SImode)
10413    return 0;
10414
10415  /* We still have to do the compare, because isel doesn't do a
10416     compare, it just looks at the CRx bits set by a previous compare
10417     instruction.  */
10418  condition_rtx = rs6000_generate_compare (GET_CODE (op));
10419  cr = XEXP (condition_rtx, 0);
10420
10421  if (GET_MODE (cr) == CCmode)
10422    emit_insn (gen_isel_signed (dest, condition_rtx,
10423				true_cond, false_cond, cr));
10424  else
10425    emit_insn (gen_isel_unsigned (dest, condition_rtx,
10426				  true_cond, false_cond, cr));
10427
10428  return 1;
10429}
10430
10431const char *
10432output_isel (rtx *operands)
10433{
10434  enum rtx_code code;
10435
10436  code = GET_CODE (operands[1]);
10437  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10438    {
10439      PUT_CODE (operands[1], reverse_condition (code));
10440      return "isel %0,%3,%2,%j1";
10441    }
10442  else
10443    return "isel %0,%2,%3,%j1";
10444}
10445
10446void
10447rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10448{
10449  enum machine_mode mode = GET_MODE (op0);
10450  enum rtx_code c;
10451  rtx target;
10452
10453  if (code == SMAX || code == SMIN)
10454    c = GE;
10455  else
10456    c = GEU;
10457
10458  if (code == SMAX || code == UMAX)
10459    target = emit_conditional_move (dest, c, op0, op1, mode,
10460				    op0, op1, mode, 0);
10461  else
10462    target = emit_conditional_move (dest, c, op0, op1, mode,
10463				    op1, op0, mode, 0);
10464  if (target == NULL_RTX)
10465    abort ();
10466  if (target != dest)
10467    emit_move_insn (dest, target);
10468}
10469
/* Emit instructions to move SRC to DST.  Called by splitters for
   multi-register moves.  It will emit at most one instruction for
   each register that is accessed; that is, it won't emit li/lis pairs
   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
   register.  */

void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = HARD_REGNO_NREGS (reg, mode);
  /* Choose the per-register move mode appropriate for the register
     class involved.  */
  if (FP_REGNO_P (reg))
    reg_mode = DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  /* The subword moves must exactly tile the full value.  */
  if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
    abort ();

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
	 overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
	emit_insn (gen_rtx_SET (VOIDmode,
				simplify_gen_subreg (reg_mode, dst, mode,
						     i * reg_mode_size),
				simplify_gen_subreg (reg_mode, src, mode,
						     i * reg_mode_size)));
    }
  else
    {
      int i;
      /* J is the subword moved just before the loop below starts; the
	 loop then walks the subwords in order starting at J+1 and
	 wrapping around.  -1 means start at subword 0.  */
      int j = -1;
      /* True when the first store was already emitted as a
	 store-with-update below.  */
      bool used_update = false;

      if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
        {
          rtx breg;

	  /* Auto-increment source address: bump the base register up
	     front and rewrite SRC as a plain (mem (reg)).  */
	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (src, 0), 0);
	      delta_rtx =  GET_CODE (XEXP (src, 0)) == PRE_INC
		  ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
		  : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
	      emit_insn (TARGET_32BIT
			 ? gen_addsi3 (breg, breg, delta_rtx)
			 : gen_adddi3 (breg, breg, delta_rtx));
	      src = gen_rtx_MEM (mode, breg);
	    }
	  else if (! offsettable_memref_p (src))
	    {
	      /* Materialize the address in the first destination
		 register, then load relative to that register.  */
	      rtx newsrc, basereg;
	      basereg = gen_rtx_REG (Pmode, reg);
	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
	      newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
	      MEM_COPY_ATTRIBUTES (newsrc, src);
	      src = newsrc;
	    }

	  breg = XEXP (src, 0);
	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
	    breg = XEXP (breg, 0);

	  /* If the base register we are using to address memory is
	     also a destination reg, then change that register last.  */
	  if (REG_P (breg)
	      && REGNO (breg) >= REGNO (dst)
	      && REGNO (breg) < REGNO (dst) + nregs)
	    j = REGNO (breg) - REGNO (dst);
	}

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (dst, 0), 0);
	      delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
		? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
		: GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));

	      /* We have to update the breg before doing the store.
		 Use store with update, if available.  */

	      if (TARGET_UPDATE)
		{
		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
		  emit_insn (TARGET_32BIT
			     ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
			     : gen_movdi_update (breg, breg, delta_rtx, nsrc));
		  used_update = true;
		}
	      else
		emit_insn (TARGET_32BIT
			   ? gen_addsi3 (breg, breg, delta_rtx)
			   : gen_adddi3 (breg, breg, delta_rtx));
	      dst = gen_rtx_MEM (mode, breg);
	    }
	  else if (! offsettable_memref_p (dst))
	    abort ();
	}

      /* Move the subwords, starting after subword J and wrapping
	 around so that a base register that is also a destination is
	 written last.  */
      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  ++j;
	  if (j == nregs)
	    j = 0;

	  /* If compiler already emitted move of first word by
	     store with update, no need to do anything.  */
	  if (j == 0 && used_update)
	    continue;

	  emit_insn (gen_rtx_SET (VOIDmode,
				  simplify_gen_subreg (reg_mode, dst, mode,
						       j * reg_mode_size),
				  simplify_gen_subreg (reg_mode, src, mode,
						       j * reg_mode_size)));
	}
    }
}
10614
10615
10616/* This page contains routines that are used to determine what the
10617   function prologue and epilogue code will do and write them out.  */
10618
10619/* Return the first fixed-point register that is required to be
10620   saved. 32 if none.  */
10621
10622int
10623first_reg_to_save (void)
10624{
10625  int first_reg;
10626
10627  /* Find lowest numbered live register.  */
10628  for (first_reg = 13; first_reg <= 31; first_reg++)
10629    if (regs_ever_live[first_reg]
10630	&& (! call_used_regs[first_reg]
10631	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10632		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10633		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
10634		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
10635      break;
10636
10637#if TARGET_MACHO
10638  if (flag_pic
10639      && current_function_uses_pic_offset_table
10640      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10641    return RS6000_PIC_OFFSET_TABLE_REGNUM;
10642#endif
10643
10644  return first_reg;
10645}
10646
10647/* Similar, for FP regs.  */
10648
10649int
10650first_fp_reg_to_save (void)
10651{
10652  int first_reg;
10653
10654  /* Find lowest numbered live register.  */
10655  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10656    if (regs_ever_live[first_reg])
10657      break;
10658
10659  return first_reg;
10660}
10661
10662/* Similar, for AltiVec regs.  */
10663
10664static int
10665first_altivec_reg_to_save (void)
10666{
10667  int i;
10668
10669  /* Stack frame remains as is unless we are in AltiVec ABI.  */
10670  if (! TARGET_ALTIVEC_ABI)
10671    return LAST_ALTIVEC_REGNO + 1;
10672
10673  /* Find lowest numbered live register.  */
10674  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10675    if (regs_ever_live[i])
10676      break;
10677
10678  return i;
10679}
10680
10681/* Return a 32-bit mask of the AltiVec registers we need to set in
10682   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
10683   the 32-bit word is 0.  */
10684
static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No AltiVec registers live at all: nothing to clear from VRSAVE.  */
  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned, so this descending loop relies on
     cfun->args_info.vregno being >= ALTIVEC_ARG_MIN_REG; otherwise
     the initial subtraction would wrap.  Presumably vregno starts at
     ALTIVEC_ARG_MIN_REG -- confirm against init_cumulative_args.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
10716
10717static void
10718is_altivec_return_reg (rtx reg, void *xyes)
10719{
10720  bool *yes = (bool *) xyes;
10721  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10722    *yes = true;
10723}
10724
10725
10726/* Calculate the stack information for the current function.  This is
10727   complicated by having two separate calling sequences, the AIX calling
10728   sequence and the V.4 calling sequence.
10729
10730   AIX (and Darwin/Mac OS X) stack frames look like:
10731							  32-bit  64-bit
10732	SP---->	+---------------------------------------+
10733		| back chain to caller			| 0	  0
10734		+---------------------------------------+
10735		| saved CR				| 4       8 (8-11)
10736		+---------------------------------------+
10737		| saved LR				| 8       16
10738		+---------------------------------------+
10739		| reserved for compilers		| 12      24
10740		+---------------------------------------+
10741		| reserved for binders			| 16      32
10742		+---------------------------------------+
10743		| saved TOC pointer			| 20      40
10744		+---------------------------------------+
10745		| Parameter save area (P)		| 24      48
10746		+---------------------------------------+
10747		| Alloca space (A)			| 24+P    etc.
10748		+---------------------------------------+
10749		| Local variable space (L)		| 24+P+A
10750		+---------------------------------------+
10751		| Float/int conversion temporary (X)	| 24+P+A+L
10752		+---------------------------------------+
10753		| Save area for AltiVec registers (W)	| 24+P+A+L+X
10754		+---------------------------------------+
10755		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
10756		+---------------------------------------+
10757		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
10758		+---------------------------------------+
10759		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
10760		+---------------------------------------+
10761		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
10762		+---------------------------------------+
10763	old SP->| back chain to caller's caller		|
10764		+---------------------------------------+
10765
10766   The required alignment for AIX configurations is two words (i.e., 8
10767   or 16 bytes).
10768
10769
10770   V.4 stack frames look like:
10771
10772	SP---->	+---------------------------------------+
10773		| back chain to caller			| 0
10774		+---------------------------------------+
10775		| caller's saved LR			| 4
10776		+---------------------------------------+
10777		| Parameter save area (P)		| 8
10778		+---------------------------------------+
10779		| Alloca space (A)			| 8+P
10780		+---------------------------------------+
10781		| Varargs save area (V)			| 8+P+A
10782		+---------------------------------------+
10783		| Local variable space (L)		| 8+P+A+V
10784		+---------------------------------------+
10785		| Float/int conversion temporary (X)	| 8+P+A+V+L
10786		+---------------------------------------+
10787		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
10788		+---------------------------------------+
10789		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
10790		+---------------------------------------+
10791		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
10792		+---------------------------------------+
10793                | SPE: area for 64-bit GP registers     |
10794                +---------------------------------------+
10795                | SPE alignment padding                 |
10796                +---------------------------------------+
10797		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
10798		+---------------------------------------+
10799		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
10800		+---------------------------------------+
10801		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
10802		+---------------------------------------+
10803	old SP->| back chain to caller's caller		|
10804		+---------------------------------------+
10805
10806   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10807   given.  (But note below and in sysv4.h that we require only 8 and
10808   may round up the size of our stack frame anyways.  The historical
10809   reason is early versions of powerpc-linux which didn't properly
10810   align the stack at program startup.  A happy side-effect is that
10811   -mno-eabi libraries can be used with -meabi programs.)
10812
10813   The EABI configuration defaults to the V.4 layout.  However,
10814   the stack alignment requirements may differ.  If -mno-eabi is not
10815   given, the required stack alignment is 8 bytes; if -mno-eabi is
10816   given, the required alignment is 16 bytes.  (But see V.4 comment
10817   above.)  */
10818
10819#ifndef ABI_STACK_BOUNDARY
10820#define ABI_STACK_BOUNDARY STACK_BOUNDARY
10821#endif
10822
10823static rs6000_stack_t *
10824rs6000_stack_info (void)
10825{
10826  static rs6000_stack_t info, zero_info;
10827  rs6000_stack_t *info_ptr = &info;
10828  int reg_size = TARGET_32BIT ? 4 : 8;
10829  int ehrd_size;
10830  int save_align;
10831  HOST_WIDE_INT non_fixed_size;
10832
10833  /* Zero all fields portably.  */
10834  info = zero_info;
10835
10836  if (TARGET_SPE)
10837    {
10838      /* Cache value so we don't rescan instruction chain over and over.  */
10839      if (cfun->machine->insn_chain_scanned_p == 0)
10840	{
10841	  cfun->machine->insn_chain_scanned_p = 1;
10842	  info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10843	}
10844    }
10845
10846  /* Select which calling sequence.  */
10847  info_ptr->abi = DEFAULT_ABI;
10848
10849  /* Calculate which registers need to be saved & save area size.  */
10850  info_ptr->first_gp_reg_save = first_reg_to_save ();
10851  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10852     even if it currently looks like we won't.  */
10853  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10854       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10855       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10856      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10857    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10858  else
10859    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10860
10861  /* For the SPE, we have an additional upper 32-bits on each GPR.
10862     Ideally we should save the entire 64-bits only when the upper
10863     half is used in SIMD instructions.  Since we only record
10864     registers live (not the size they are used in), this proves
10865     difficult because we'd have to traverse the instruction chain at
10866     the right time, taking reload into account.  This is a real pain,
10867     so we opt to save the GPRs in 64-bits always if but one register
10868     gets used in 64-bits.  Otherwise, all the registers in the frame
10869     get saved in 32-bits.
10870
10871     So... since when we save all GPRs (except the SP) in 64-bits, the
10872     traditional GP save area will be empty.  */
10873  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10874    info_ptr->gp_size = 0;
10875
10876  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10877  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10878
10879  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10880  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10881				 - info_ptr->first_altivec_reg_save);
10882
10883  /* Does this function call anything?  */
10884  info_ptr->calls_p = (! current_function_is_leaf
10885		       || cfun->machine->ra_needs_full_frame);
10886
10887  /* Determine if we need to save the link register.  */
10888  if (rs6000_ra_ever_killed ()
10889      || (DEFAULT_ABI == ABI_AIX
10890	  && current_function_profile
10891	  && !TARGET_PROFILE_KERNEL)
10892#ifdef TARGET_RELOCATABLE
10893      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10894#endif
10895      || (info_ptr->first_fp_reg_save != 64
10896	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10897      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10898      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10899      || (DEFAULT_ABI == ABI_DARWIN
10900	  && flag_pic
10901	  && current_function_uses_pic_offset_table)
10902      || info_ptr->calls_p)
10903    {
10904      info_ptr->lr_save_p = 1;
10905      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10906    }
10907
10908  /* Determine if we need to save the condition code registers.  */
10909  if (regs_ever_live[CR2_REGNO]
10910      || regs_ever_live[CR3_REGNO]
10911      || regs_ever_live[CR4_REGNO])
10912    {
10913      info_ptr->cr_save_p = 1;
10914      if (DEFAULT_ABI == ABI_V4)
10915	info_ptr->cr_size = reg_size;
10916    }
10917
10918  /* If the current function calls __builtin_eh_return, then we need
10919     to allocate stack space for registers that will hold data for
10920     the exception handler.  */
10921  if (current_function_calls_eh_return)
10922    {
10923      unsigned int i;
10924      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10925	continue;
10926
10927      /* SPE saves EH registers in 64-bits.  */
10928      ehrd_size = i * (TARGET_SPE_ABI
10929		       && info_ptr->spe_64bit_regs_used != 0
10930		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10931    }
10932  else
10933    ehrd_size = 0;
10934
10935  /* Determine various sizes.  */
10936  info_ptr->reg_size     = reg_size;
10937  info_ptr->fixed_size   = RS6000_SAVE_AREA;
10938  info_ptr->varargs_size = RS6000_VARARGS_AREA;
10939  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
10940  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
10941					 TARGET_ALTIVEC ? 16 : 8);
10942
10943  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10944    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10945  else
10946    info_ptr->spe_gp_size = 0;
10947
10948  if (TARGET_ALTIVEC_ABI)
10949    info_ptr->vrsave_mask = compute_vrsave_mask ();
10950  else
10951    info_ptr->vrsave_mask = 0;
10952
10953  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10954    info_ptr->vrsave_size  = 4;
10955  else
10956    info_ptr->vrsave_size  = 0;
10957
10958  /* Calculate the offsets.  */
10959  switch (DEFAULT_ABI)
10960    {
10961    case ABI_NONE:
10962    default:
10963      abort ();
10964
10965    case ABI_AIX:
10966    case ABI_DARWIN:
10967      info_ptr->fp_save_offset   = - info_ptr->fp_size;
10968      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
10969
10970      if (TARGET_ALTIVEC_ABI)
10971	{
10972	  info_ptr->vrsave_save_offset
10973	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10974
10975	  /* Align stack so vector save area is on a quadword boundary.  */
10976	  if (info_ptr->altivec_size != 0)
10977	    info_ptr->altivec_padding_size
10978	      = 16 - (-info_ptr->vrsave_save_offset % 16);
10979	  else
10980	    info_ptr->altivec_padding_size = 0;
10981
10982	  info_ptr->altivec_save_offset
10983	    = info_ptr->vrsave_save_offset
10984	    - info_ptr->altivec_padding_size
10985	    - info_ptr->altivec_size;
10986
10987	  /* Adjust for AltiVec case.  */
10988	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10989	}
10990      else
10991	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
10992      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
10993      info_ptr->lr_save_offset   = 2*reg_size;
10994      break;
10995
10996    case ABI_V4:
10997      info_ptr->fp_save_offset   = - info_ptr->fp_size;
10998      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
10999      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
11000
11001      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11002      {
11003        /* Align stack so SPE GPR save area is aligned on a
11004           double-word boundary.  */
11005        if (info_ptr->spe_gp_size != 0)
11006          info_ptr->spe_padding_size
11007            = 8 - (-info_ptr->cr_save_offset % 8);
11008        else
11009          info_ptr->spe_padding_size = 0;
11010
11011        info_ptr->spe_gp_save_offset
11012          = info_ptr->cr_save_offset
11013          - info_ptr->spe_padding_size
11014          - info_ptr->spe_gp_size;
11015
11016        /* Adjust for SPE case.  */
11017        info_ptr->toc_save_offset
11018          = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11019      }
11020      else if (TARGET_ALTIVEC_ABI)
11021	{
11022	  info_ptr->vrsave_save_offset
11023	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11024
11025	  /* Align stack so vector save area is on a quadword boundary.  */
11026	  if (info_ptr->altivec_size != 0)
11027	    info_ptr->altivec_padding_size
11028	      = 16 - (-info_ptr->vrsave_save_offset % 16);
11029	  else
11030	    info_ptr->altivec_padding_size = 0;
11031
11032	  info_ptr->altivec_save_offset
11033	    = info_ptr->vrsave_save_offset
11034	    - info_ptr->altivec_padding_size
11035	    - info_ptr->altivec_size;
11036
11037	  /* Adjust for AltiVec case.  */
11038	  info_ptr->toc_save_offset
11039	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
11040	}
11041      else
11042	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
11043      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
11044      info_ptr->lr_save_offset   = reg_size;
11045      break;
11046    }
11047
11048  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
11049  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
11050					 + info_ptr->gp_size
11051					 + info_ptr->altivec_size
11052					 + info_ptr->altivec_padding_size
11053					 + info_ptr->spe_gp_size
11054					 + info_ptr->spe_padding_size
11055					 + ehrd_size
11056					 + info_ptr->cr_size
11057					 + info_ptr->lr_size
11058					 + info_ptr->vrsave_size
11059					 + info_ptr->toc_size,
11060					 save_align);
11061
11062  non_fixed_size	 = (info_ptr->vars_size
11063			    + info_ptr->parm_size
11064			    + info_ptr->save_size
11065			    + info_ptr->varargs_size);
11066
11067  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11068				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11069
11070  /* Determine if we need to allocate any stack frame:
11071
11072     For AIX we need to push the stack if a frame pointer is needed
11073     (because the stack might be dynamically adjusted), if we are
11074     debugging, if we make calls, or if the sum of fp_save, gp_save,
11075     and local variables are more than the space needed to save all
11076     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11077     + 18*8 = 288 (GPR13 reserved).
11078
11079     For V.4 we don't have the stack cushion that AIX uses, but assume
11080     that the debugger can handle stackless frames.  */
11081
11082  if (info_ptr->calls_p)
11083    info_ptr->push_p = 1;
11084
11085  else if (DEFAULT_ABI == ABI_V4)
11086    info_ptr->push_p = non_fixed_size != 0;
11087
11088  else if (frame_pointer_needed)
11089    info_ptr->push_p = 1;
11090
11091  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11092    info_ptr->push_p = 1;
11093
11094  else
11095    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11096
11097  /* Zero offsets if we're not saving those registers.  */
11098  if (info_ptr->fp_size == 0)
11099    info_ptr->fp_save_offset = 0;
11100
11101  if (info_ptr->gp_size == 0)
11102    info_ptr->gp_save_offset = 0;
11103
11104  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11105    info_ptr->altivec_save_offset = 0;
11106
11107  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11108    info_ptr->vrsave_save_offset = 0;
11109
11110  if (! TARGET_SPE_ABI
11111      || info_ptr->spe_64bit_regs_used == 0
11112      || info_ptr->spe_gp_size == 0)
11113    info_ptr->spe_gp_save_offset = 0;
11114
11115  if (! info_ptr->lr_save_p)
11116    info_ptr->lr_save_offset = 0;
11117
11118  if (! info_ptr->cr_save_p)
11119    info_ptr->cr_save_offset = 0;
11120
11121  if (! info_ptr->toc_save_p)
11122    info_ptr->toc_save_offset = 0;
11123
11124  return info_ptr;
11125}
11126
11127/* Return true if the current function uses any GPRs in 64-bit SIMD
11128   mode.  */
11129
11130static bool
11131spe_func_has_64bit_regs_p (void)
11132{
11133  rtx insns, insn;
11134
11135  /* Functions that save and restore all the call-saved registers will
11136     need to save/restore the registers in 64-bits.  */
11137  if (current_function_calls_eh_return
11138      || current_function_calls_setjmp
11139      || current_function_has_nonlocal_goto)
11140    return true;
11141
11142  insns = get_insns ();
11143
11144  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11145    {
11146      if (INSN_P (insn))
11147	{
11148	  rtx i;
11149
11150	  i = PATTERN (insn);
11151	  if (GET_CODE (i) == SET
11152	      && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11153	    return true;
11154	}
11155    }
11156
11157  return false;
11158}
11159
/* Dump the stack-frame layout INFO to stderr for debugging.  When
   INFO is NULL, recompute it for the current function first.  Only
   fields with non-default values are printed.  */
static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 and 64 below are the "no register saved" sentinels returned by
     first_reg_to_save / first_fp_reg_to_save.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
11305
11306rtx
11307rs6000_return_addr (int count, rtx frame)
11308{
11309  /* Currently we don't optimize very well between prolog and body
11310     code and for PIC code the code can be actually quite bad, so
11311     don't try to be too clever here.  */
11312  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11313    {
11314      cfun->machine->ra_needs_full_frame = 1;
11315
11316      return
11317	gen_rtx_MEM
11318	  (Pmode,
11319	   memory_address
11320	   (Pmode,
11321	    plus_constant (copy_to_reg
11322			   (gen_rtx_MEM (Pmode,
11323					 memory_address (Pmode, frame))),
11324			   RETURN_ADDRESS_OFFSET)));
11325    }
11326
11327  cfun->machine->ra_need_lr = 1;
11328  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11329}
11330
11331/* Say whether a function is a candidate for sibcall handling or not.
11332   We do not allow indirect calls to be optimized into sibling calls.
11333   Also, we can't do it if there are any vector parameters; there's
11334   nowhere to put the VRsave code so it works; note that functions with
11335   vector parameters are required to have a prototype, so the argument
11336   type info must be available here.  (The tail recursion case can work
11337   with vector parameters, but there's no way to distinguish here.) */
11338static bool
11339rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11340{
11341  tree type;
11342  if (decl)
11343    {
11344      if (TARGET_ALTIVEC_VRSAVE)
11345        {
11346	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11347	       type; type = TREE_CHAIN (type))
11348	    {
11349	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11350		return false;
11351	    }
11352        }
11353      if (DEFAULT_ABI == ABI_DARWIN
11354	  || (*targetm.binds_local_p) (decl))
11355	{
11356	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11357
11358	  if (!lookup_attribute ("longcall", attr_list)
11359	      || lookup_attribute ("shortcall", attr_list))
11360	    return true;
11361	}
11362    }
11363  return false;
11364}
11365
11366static int
11367rs6000_ra_ever_killed (void)
11368{
11369  rtx top;
11370  rtx reg;
11371  rtx insn;
11372
11373  if (current_function_is_thunk)
11374    return 0;
11375
11376  /* regs_ever_live has LR marked as used if any sibcalls are present,
11377     but this should not force saving and restoring in the
11378     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
11379     clobbers LR, so that is inappropriate.  */
11380
11381  /* Also, the prologue can generate a store into LR that
11382     doesn't really count, like this:
11383
11384        move LR->R0
11385        bcl to set PIC register
11386        move LR->R31
11387        move R0->LR
11388
11389     When we're called from the epilogue, we need to avoid counting
11390     this as a store.  */
11391
11392  push_topmost_sequence ();
11393  top = get_insns ();
11394  pop_topmost_sequence ();
11395  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11396
11397  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11398    {
11399      if (INSN_P (insn))
11400	{
11401	  if (FIND_REG_INC_NOTE (insn, reg))
11402	    return 1;
11403	  else if (GET_CODE (insn) == CALL_INSN
11404		   && !SIBLING_CALL_P (insn))
11405	    return 1;
11406	  else if (set_of (reg, insn) != NULL_RTX
11407		   && !prologue_epilogue_contains (insn))
11408	    return 1;
11409    	}
11410    }
11411  return 0;
11412}
11413
11414/* Add a REG_MAYBE_DEAD note to the insn.  */
11415static void
11416rs6000_maybe_dead (rtx insn)
11417{
11418  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11419					const0_rtx,
11420					REG_NOTES (insn));
11421}
11422
/* Emit instructions needed to load the TOC register into
   RS6000_PIC_OFFSET_TABLE_REGNUM.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue; in
   that case hard registers (LR, r0) are used as temporaries instead of
   fresh pseudos, and every emitted insn gets a REG_MAYBE_DEAD note so
   flow may delete the sequence if the TOC turns out to be unused.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC (-fpic): a single load_toc_v4_pic_si insn.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 large-model PIC (-fPIC): compute the TOC address from a
     pc-relative label pair (prologue) or from the TOC label symbol.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symF, symL;

	  /* symF marks the spot where the LR is captured; symL is a
	     label in the TOC.  PIC_2 computes their difference.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Not in the prologue: load the offset through a slot in the
	     TOC itself instead of a label pair.  */
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32: materialize the
	 TOC label address with a high/low pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from its dedicated stack slot.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
11519
/* Emit instructions to restore the link register after determining where
   its value has been stored.

   SOURCE holds the value to restore; SCRATCH is a register we may
   clobber.  If the prologue saved LR on the stack, write SOURCE into
   that stack slot (so the unwinder's reload picks it up); otherwise
   move SOURCE directly into the link register.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      /* When the save slot is not addressable at a small offset from
	 the stack pointer, chase the back chain into SCRATCH and
	 address the slot relative to the caller's frame instead.  */
      if (frame_pointer_needed
	  || current_function_calls_alloca
	  || info->total_size > 32767)
	{
	  emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
	  frame_rtx = operands[1];
	}
      else if (info->push_p)
	sp_offset = info->total_size;

      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_rtx_MEM (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
}
11555
/* Cached alias set used for all TOC references; -1 until created.
   GTY-marked so the value survives garbage collection.  */
static GTY(()) int set = -1;

/* Return the alias set for TOC references, creating it lazily on
   first use.  */
int
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}
11565
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx body;
      int elt;

      if (!INSN_P (insn))
	continue;

      /* The marker only ever appears inside a PARALLEL.  */
      body = PATTERN (insn);
      if (GET_CODE (body) != PARALLEL)
	continue;

      for (elt = 0; elt < XVECLEN (body, 0); elt++)
	{
	  rtx x = XVECEXP (body, 0, elt);

	  if (GET_CODE (x) != USE)
	    continue;

	  x = XEXP (x, 0);
	  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TOC)
	    return 1;
	}
    }

  return 0;
}
#endif
11597
11598rtx
11599create_TOC_reference (rtx symbol)
11600{
11601  if (no_new_pseudos)
11602    regs_ever_live[TOC_REGISTER] = 1;
11603  return gen_rtx_PLUS (Pmode,
11604	   gen_rtx_REG (Pmode, TOC_REGISTER),
11605	     gen_rtx_CONST (Pmode,
11606	       gen_rtx_MINUS (Pmode, symbol,
11607		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11608}
11609
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.

   Emits a runtime check: load the caller's frame, fetch the
   instruction at its return address, and compare it against the
   expected TOC-restore opcode (NOTE(review): 0x80410014 / 0xE8410028
   look like the encodings of "lwz r2,20(r1)" / "ld r2,40(r1)" —
   confirm against the ISA).  If the restore is absent, store r2 into
   the reserved TOC slot ourselves.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Fetch the instruction at the saved return address (the LR save
     slot is 2 words up from the frame base).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  /* No TOC restore found after the call: save r2 into the TOC slot
     (5 words up from the frame base) ourselves.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
11645
11646/* This ties together stack memory (MEM with an alias set of
11647   rs6000_sr_alias_set) and the change to the stack pointer.  */
11648
11649static void
11650rs6000_emit_stack_tie (void)
11651{
11652  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11653
11654  set_mem_alias_set (mem, rs6000_sr_alias_set);
11655  emit_insn (gen_stack_tie (mem));
11656}
11657
/* Emit the correct code for allocating SIZE bytes of stack space, as
   insns, decrementing the stack pointer and storing the back chain.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   Honors -fstack-limit via a conditional trap, and marks the stack
   adjustment RTX_FRAME_RELATED with an explicit sp = sp - SIZE note
   for the unwinder.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      /* Limit held in a fixed register: compute limit + size into r0
	 and trap if the new stack pointer would fall below it.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)))
;
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Limit given as a symbol (32-bit SVR4 only): materialize
	 symbol + size with a high/low pair, then trap.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update, the old sp must be copied to r12 so
     the back chain can be stored after the adjustment.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* stwu/stdu: decrement sp and store the back chain in one insn.
	 Large sizes don't fit the immediate field; go through r0.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: adjust sp, then store the old sp (r12) as the
	 back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the adjustment to dwarf2out as sp = sp - size.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
11743
11744/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11745   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11746   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
11747   deduce these equivalences by itself so it wasn't necessary to hold
11748   its hand so much.  */
11749
11750static void
11751rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11752		      rtx reg2, rtx rreg)
11753{
11754  rtx real, temp;
11755
11756  /* copy_rtx will not make unique copies of registers, so we need to
11757     ensure we don't have unwanted sharing here.  */
11758  if (reg == reg2)
11759    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11760
11761  if (reg == rreg)
11762    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11763
11764  real = copy_rtx (PATTERN (insn));
11765
11766  if (reg2 != NULL_RTX)
11767    real = replace_rtx (real, reg2, rreg);
11768
11769  real = replace_rtx (real, reg,
11770		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11771							STACK_POINTER_REGNUM),
11772				    GEN_INT (val)));
11773
11774  /* We expect that 'real' is either a SET or a PARALLEL containing
11775     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
11776     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
11777
11778  if (GET_CODE (real) == SET)
11779    {
11780      rtx set = real;
11781
11782      temp = simplify_rtx (SET_SRC (set));
11783      if (temp)
11784	SET_SRC (set) = temp;
11785      temp = simplify_rtx (SET_DEST (set));
11786      if (temp)
11787	SET_DEST (set) = temp;
11788      if (GET_CODE (SET_DEST (set)) == MEM)
11789	{
11790	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11791	  if (temp)
11792	    XEXP (SET_DEST (set), 0) = temp;
11793	}
11794    }
11795  else if (GET_CODE (real) == PARALLEL)
11796    {
11797      int i;
11798      for (i = 0; i < XVECLEN (real, 0); i++)
11799	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11800	  {
11801	    rtx set = XVECEXP (real, 0, i);
11802
11803	    temp = simplify_rtx (SET_SRC (set));
11804	    if (temp)
11805	      SET_SRC (set) = temp;
11806	    temp = simplify_rtx (SET_DEST (set));
11807	    if (temp)
11808	      SET_DEST (set) = temp;
11809	    if (GET_CODE (SET_DEST (set)) == MEM)
11810	      {
11811		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11812		if (temp)
11813		  XEXP (SET_DEST (set), 0) = temp;
11814	      }
11815	    RTX_FRAME_RELATED_P (set) = 1;
11816	  }
11817    }
11818  else
11819    abort ();
11820
11821  if (TARGET_SPE)
11822    real = spe_synthesize_frame_save (real);
11823
11824  RTX_FRAME_RELATED_P (insn) = 1;
11825  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11826					real,
11827					REG_NOTES (insn));
11828}
11829
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.

   REAL must be a (set (mem ...) (reg ...)); anything else is returned
   unchanged (non-V2SImode) or aborts (malformed V2SImode save).  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) SPE saves get the treatment.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* real2: the original save, narrowed to an SImode store of reg z.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian the low word of the pair lives at offset +4.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  /* synth: the companion set, storing synthetic register z+1200 at
     the other half of the slot.  */
  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
11892
/* Returns an insn (a PARALLEL, not yet emitted) that has a vrsave set
   operation with the appropriate CLOBBERs.

   REG holds the new VRSAVE value; INFO supplies the mask of live
   AltiVec registers; EPILOGUEP is nonzero when building the epilogue
   restore, in which case call-saved AltiVec registers get an unspec
   use/set instead of a plain clobber (see comment below).  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  /* Element 0 is the VRSAVE update itself, as a volatile unspec so it
     cannot be moved or deleted.  */
  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    30));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

     	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs [i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  /* Wrap the set and all clobbers into a single PARALLEL.  */
  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
11951
11952/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11953   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */
11954
11955static void
11956emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11957		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11958{
11959  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11960  rtx replacea, replaceb;
11961
11962  int_rtx = GEN_INT (offset);
11963
11964  /* Some cases that need register indexed addressing.  */
11965  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11966      || (TARGET_SPE_ABI
11967	  && SPE_VECTOR_MODE (mode)
11968	  && !SPE_CONST_OFFSET_OK (offset)))
11969    {
11970      /* Whomever calls us must make sure r11 is available in the
11971         flow path of instructions in the prologue.  */
11972      offset_rtx = gen_rtx_REG (Pmode, 11);
11973      emit_move_insn (offset_rtx, int_rtx);
11974
11975      replacea = offset_rtx;
11976      replaceb = int_rtx;
11977    }
11978  else
11979    {
11980      offset_rtx = int_rtx;
11981      replacea = NULL_RTX;
11982      replaceb = NULL_RTX;
11983    }
11984
11985  reg = gen_rtx_REG (mode, regno);
11986  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11987  mem = gen_rtx_MEM (mode, addr);
11988  set_mem_alias_set (mem, rs6000_sr_alias_set);
11989
11990  insn = emit_move_insn (mem, reg);
11991
11992  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11993}
11994
11995/* Emit an offset memory reference suitable for a frame store, while
11996   converting to a valid addressing mode.  */
11997
11998static rtx
11999gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12000{
12001  rtx int_rtx, offset_rtx;
12002
12003  int_rtx = GEN_INT (offset);
12004
12005  if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12006    {
12007      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12008      emit_move_insn (offset_rtx, int_rtx);
12009    }
12010  else
12011    offset_rtx = int_rtx;
12012
12013  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12014}
12015
/* Emit function prologue as insns.

   Overall order: (V.4/eh_return only) allocate stack first, then save
   AltiVec regs, VRSAVE, FPRs, GPRs, the eh_return data registers, LR
   and CR; then (other ABIs) allocate the stack, set up the frame
   pointer, and finally load the TOC / PIC base register if needed.
   Each save is tagged with frame-related notes via
   rs6000_frame_related so dwarf2out can build the unwind info.  */

void
rs6000_emit_prologue (void)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL_RTX;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

   /* SPE saves 64-bit GPRs as V2SImode pairs.  */
   if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
     {
       reg_mode = V2SImode;
       reg_size = 8;
     }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  /* FPRs are saved inline (rather than via the out-of-line _savef*
     routines) when there are none, few, or when an LR clobber by the
     routine would be unacceptable.  */
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save)
			|| current_function_calls_eh_return
			|| cfun->machine->ra_need_lr);

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p
      && (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return))
    {
      /* Small frames: keep addressing off sp with an offset.  Large
	 frames: keep the old sp in r12 and address saves off it.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
	 as frame_reg_rtx and r11 as the static chain pointer for
	 nested functions.  */
      reg = gen_rtx_REG (SImode, 0);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line FPR save: a PARALLEL that clobbers LR (the call),
	 uses the _savef* symbol, and sets each FPR save slot.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Save each live, call-saved GPR individually.  The PIC/TOC
	 register is also saved when it is live for PIC or minimal-TOC
	 even though it is call-used.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && (! call_used_regs[info->first_gp_reg_save+i]
		 || (i+info->first_gp_reg_save
		     == RS6000_PIC_OFFSET_TABLE_REGNUM
		     && TARGET_TOC && TARGET_MINIMAL_TOC)))
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		/* SPE 64-bit saves: large offsets must go through the
		   fixed scratch register.  */
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
	{
	  rtx addr, reg, mem;

	  reg = gen_rtx_REG (reg_mode, 2);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (sp_offset + 5 * reg_size));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	  /* Keep the note but replace the (pretend) store itself with
	     a blockage insn.  */
	  PATTERN (insn) = gen_blockage ();
	}

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
	 But that's OK.  All we have to do is specify that _one_ condition
	 code register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R12 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF
				    && DEFAULT_ABI != ABI_AIX
				    && flag_pic
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      {
	rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
	rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
	rs6000_emit_load_toc_table (TRUE);
	rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
      }
    else
      rs6000_emit_load_toc_table (TRUE);
  }

#if TARGET_MACHO
  /* Darwin PIC: capture the pic base address in LR, then copy it into
     the PIC offset table register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
12413
/* Write the function prologue to FILE.

   SIZE is the frame size computed by the middle end; it is unused
   because rs6000_stack_info () recomputes the full frame layout.

   Three jobs are done here:
     1. Emit ".extern" directives for the out-of-line FP save/restore
	millicode routines, when they will be called.
     2. Emit ".extern" directives for the AIX common-mode arithmetic
	routines on pre-PowerPC targets (done at most once per file,
	tracked by common_mode_defined).
     3. If the backend has no "prologue" insn pattern (! HAVE_prologue),
	expand the prologue RTL right here and print it with final ().  */

static void
rs6000_output_function_prologue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* File-scope flag: only emit these externs once per object file.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    /* Assign nominal addresses at 4 bytes per insn.  */
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Bump the PIC label counter so the next function gets a fresh label.  */
  rs6000_pic_labelno++;
}
12475
/* Emit function epilogue as insns.

   SIBCALL is nonzero when this epilogue precedes a sibling call; in
   that case FPRs are always restored inline and no return insn is
   emitted (the sibcall itself transfers control).

   Restore order: AltiVec regs, VRSAVE, LR (staged through r0), CR
   (staged through r12), EH data regs, GPRs, FPRs, then the stack
   pointer adjustment (which on V.4/EH paths must come last, after a
   stack tie), and finally the return.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (int sibcall)
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int i;

  info = rs6000_stack_info ();

  /* Under the SPE ABI, 64-bit GPR contents were saved as V2SImode.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors mfcr with a single-field mask is no faster,
     so prefer one multi-field mfcr; likewise when optimizing for size.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* The backchain word at 0(r1) holds the caller's stack pointer.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return)
	/* Defer the sp adjustment; record the offset to add to every
	   restore address instead.  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the computed frame offset of this vector slot.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      /* Stage the saved VRSAVE value through r12.  */
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Load into r0 now; moved into LR
     further below, so the load can overlap other restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  Stage it through r12.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      if (TARGET_AIX)
	{
	  /* Reload the saved TOC pointer (r2) from its slot.  */
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (sp_offset + 5 * reg_size));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
	}

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore individually every GPR that the prologue saved:
       call-saved live regs, plus the PIC/TOC register when it was saved
       for -mminimal-toc or for pic code under V.4/Darwin.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && (! call_used_regs[info->first_gp_reg_save+i]
	       || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  /* Offset too large for the insn; materialize it in
		     the fixed scratch register.  */
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p implies at least one CR field needs restoring.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL moving r12 into all live CR fields at once.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		/* Field-select mask: bit 7-i selects CR field i.  */
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* One mtcrf per live CR field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4
      || current_function_calls_eh_return)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* For eh_return, additionally bump sp by the handler's stack
     adjustment.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	/* Room for return + LR use + restore-routine use + FPR sets.  */
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
12863
/* Write the function epilogue to FILE.

   SIZE is the frame size computed by the middle end; unused here
   because rs6000_stack_info () recomputes the layout.

   If the backend has no "epilogue" insn pattern (! HAVE_epilogue),
   the epilogue RTL is expanded and printed here via final ().  On
   Mach-O, branch islands are flushed and a trailing nop may be added.
   Finally, for AIX a traceback table is emitted describing the frame
   and parameters for debuggers and stack walkers.  */

static void
rs6000_output_function_epilogue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		/* Nominal addresses, 4 bytes per insn.  */
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

#if TARGET_MACHO
  macho_branch_islands ();
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
	   && NOTE_P (insn)
	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
	&& (LABEL_P (insn)
	    || (NOTE_P (insn)
		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      /* Nonzero when the variable-length optional fields (parameter
	 info, name, offsets) are emitted in addition to the fixed
	 eight-word header.  */
      int optional_tbtab;

      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there does not seem to be any
	 official way to discover the language being compiled, so we
	 use language_string.
	 C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
	 Java is 13.  Objective-C is 14.  */
      if (! strcmp (language_string, "GNU C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU Objective-C"))
	i = 14;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only parameters arriving in registers are described.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      /* Fixed-point parms count in whole words.  */
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
13152
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.

   VCALL_OFFSET, when nonzero, is an additional adjustment loaded
   indirectly through the object's vtable.  */

static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			tree function)
{
  rtx this, insn, funexp;

  /* Pretend reload and the epilogue have run so we can emit
     hard-register RTL directly; reset at the end.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so "this" arrives in r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      /* r12 is call-clobbered and free to use as scratch here.  */
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* tmp = vtable pointer loaded from *this.  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
	{
	  /* Offset doesn't fit a 16-bit displacement; add it explicitly
	     before the indirect load.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
	}
      else
	{
	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
	}
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the global compiler state we faked above.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
13284
13285/* A quick summary of the various types of 'constant-pool tables'
13286   under PowerPC:
13287
13288   Target	Flags		Name		One table per
13289   AIX		(none)		AIX TOC		object file
13290   AIX		-mfull-toc	AIX TOC		object file
13291   AIX		-mminimal-toc	AIX minimal TOC	translation unit
13292   SVR4/EABI	(none)		SVR4 SDATA	object file
13293   SVR4/EABI	-fpic		SVR4 pic	object file
13294   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
13295   SVR4/EABI	-mrelocatable	EABI TOC	function
13296   SVR4/EABI	-maix		AIX TOC		object file
13297   SVR4/EABI	-maix -mminimal-toc
13298				AIX minimal TOC	translation unit
13299
13300   Name			Reg.	Set by	entries	      contains:
13301					made by	 addrs?	fp?	sum?
13302
13303   AIX TOC		2	crt0	as	 Y	option	option
13304   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
13305   SVR4 SDATA		13	crt0	gcc	 N	Y	N
13306   SVR4 pic		30	prolog	ld	 Y	not yet	N
13307   SVR4 PIC		30	prolog	gcc	 Y	option	option
13308   EABI TOC		30	prolog	gcc	 Y	option	option
13309
13310*/
13311
13312/* Hash functions for the hash table.  */
13313
static unsigned
rs6000_hash_constant (rtx k)
{
  /* Hash the rtx K (a TOC key) into an unsigned value.  The hash mixes
     the rtx code, the machine mode, and every operand, recursing into
     sub-expressions.  Must agree with toc_hash_eq: rtx_equal_p rtxes
     of the same mode must hash identically.  */
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  /* Special-case a few codes before the generic format-driven walk.  */
  switch (code)
    {
    case LABEL_REF:
      /* Labels hash by the UID of the CODE_LABEL they reference.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	/* FP constants: hash the REAL_VALUE_TYPE payload directly.  */
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* VOIDmode CONST_DOUBLE is a wide integer; only the first two
	 operand slots carry the value.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/uid slots; start hashing at the label number.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Generic walk: hash each operand according to its format letter.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Sub-expression: recurse.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	/* A HOST_WIDE_INT may be wider than unsigned; if so, fold it in
	   one unsigned-sized chunk at a time.  */
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':
	/* Unused slot; contributes nothing.  */
	break;
      default:
	abort ();
      }

  return result;
}
13386
13387static unsigned
13388toc_hash_function (const void *hash_entry)
13389{
13390  const struct toc_hash_struct *thc =
13391    (const struct toc_hash_struct *) hash_entry;
13392  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13393}
13394
13395/* Compare H1 and H2 for equivalence.  */
13396
13397static int
13398toc_hash_eq (const void *h1, const void *h2)
13399{
13400  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13401  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13402
13403  if (((const struct toc_hash_struct *) h1)->key_mode
13404      != ((const struct toc_hash_struct *) h2)->key_mode)
13405    return 0;
13406
13407  return rtx_equal_p (r1, r2);
13408}
13409
13410/* These are the names given by the C++ front-end to vtables, and
13411   vtable-like objects.  Ideally, this logic should not be here;
13412   instead, there should be some programmatic way of inquiring as
13413   to whether or not an object is a vtable.  */
13414
/* Nonzero if NAME is a C++ vtable/vtable-like symbol name.  The old
   definition ignored its parameter and referenced a caller-scope
   variable `name'; use the macro argument (parenthesized) so the
   macro is hygienic.  Both existing call sites pass `name', so
   behavior is unchanged.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
13421
13422void
13423rs6000_output_symbol_ref (FILE *file, rtx x)
13424{
13425  /* Currently C++ toc references to vtables can be emitted before it
13426     is decided whether the vtable is public or private.  If this is
13427     the case, then the linker will eventually complain that there is
13428     a reference to an unknown section.  Thus, for vtables only,
13429     we emit the TOC reference to reference the symbol and not the
13430     section.  */
13431  const char *name = XSTR (x, 0);
13432
13433  if (VTABLE_NAME_P (name))
13434    {
13435      RS6000_OUTPUT_BASENAME (file, name);
13436    }
13437  else
13438    assemble_name (file, name);
13439}
13440
13441/* Output a TOC entry.  We derive the entry name from what is being
13442   written.  */
13443
void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  /* Emit the TOC entry for constant X (with mode MODE) under internal
     label LABELNO, deriving the entry name from the constant itself.
     Returns early after emitting FP/integer constants; falls through
     to the symbol/label path otherwise.  */
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit target words.  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit target words.  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one word; on 64-bit targets it is padded with
	 zeroes to fill a doubleword TOC slot.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into low/high 32-bit halves.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into the high half.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Anything left is a symbol, a label, or (symbol + offset).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the sign of the offset into the entry name (.N/.P)
	 so entries at different offsets get distinct names.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
13735
13736/* Output an assembler pseudo-op to write an ASCII string of N characters
13737   starting at P to FILE.
13738
13739   On the RS/6000, we have to do this using the .byte operation and
13740   write out special characters outside the quoted string.
13741   Also, the assembler is broken; very long strings are truncated,
13742   so we must artificially break them up early.  */
13743
void
output_ascii (FILE *file, const char *p, int n)
{
  /* Emit the N characters starting at P as ".byte" directives.
     Printable characters are batched into quoted strings; all other
     characters are written as decimal byte values.  Quoted strings are
     flushed before they grow long enough to be truncated by the
     (broken) assembler.  */
  const char *str_open = "\t.byte \"";
  const char *num_open = "\t.byte ";
  const char *closer = NULL;
  int quoted_len = 0;
  int idx;

  for (idx = 0; idx < n; idx++)
    {
      char ch = *p++;

      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable: open (or continue) a quoted string.  */
	  if (str_open)
	    fputs (str_open, file);
	  putc (ch, file);

	  /* A quote character is doubled inside a quoted string.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++quoted_len;
	    }

	  str_open = NULL;
	  num_open = "\"\n\t.byte ";
	  closer = "\"\n";
	  ++quoted_len;

	  /* Break long strings up before the assembler truncates them.  */
	  if (quoted_len >= 512)
	    {
	      fputs (closer, file);

	      str_open = "\t.byte \"";
	      num_open = "\t.byte ";
	      closer = NULL;
	      quoted_len = 0;
	    }
	}
      else
	{
	  /* Unprintable: emit the decimal value of the char.  */
	  if (num_open)
	    fputs (num_open, file);
	  fprintf (file, "%d", ch);

	  str_open = "\n\t.byte \"";
	  num_open = ", ";
	  closer = "\n";
	  quoted_len = 0;
	}
    }

  /* Close any open quoted string, then end the line.  */
  if (closer)
    fputs (closer, file);
}
13802
13803/* Generate a unique section name for FILENAME for a section type
13804   represented by SECTION_DESC.  Output goes into BUF.
13805
13806   SECTION_DESC can be any string, as long as it is different for each
13807   possible section type.
13808
13809   We name the section in the same manner as xlc.  The name begins with an
13810   underscore followed by the filename (after stripping any leading directory
13811   names) with the last period replaced by the string SECTION_DESC.  If
13812   FILENAME does not contain a period, SECTION_DESC is appended to the end of
13813   the name.  */
13814
void
rs6000_gen_section_name (char **buf, const char *filename,
		         const char *section_desc)
{
  /* Build in *BUF a section name in the style of xlc: an underscore,
     then FILENAME stripped of directories and non-alphanumerics, with
     the last '.' replaced by SECTION_DESC (or SECTION_DESC appended if
     FILENAME contains no '.').  The buffer is xmalloc'd; the caller
     owns it.  */
  const char *scan, *basename, *dot = 0;
  char *out;
  int len;

  /* Locate the start of the base name, and the last period anywhere
     in FILENAME (matching the original behavior, which records a
     period even if it precedes the final slash).  */
  basename = filename;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	basename = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  len = strlen (basename) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumerics, swapping the recorded period (if it lies in
     the base name) for SECTION_DESC.  */
  for (scan = basename; *scan; scan++)
    {
      if (scan == dot)
        {
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
        }
      else if (ISALNUM (*scan))
        *out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
13856
13857/* Emit profile function.  */
13858
void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Emit RTL to call the profiling routine (RS6000_MCOUNT), in the
     manner required by the current ABI.  LABELNO names the per-site
     profile counter label on AIX.  No-op under -mprofile-kernel
     (handled in output_function_profiler instead).  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  /* Pass the address of the LP<labelno> counter label to
	     mcount as its single argument.  */
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's return address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
13909
13910/* Write function profiler code.  */
13911
void
output_function_profiler (FILE *file, int labelno)
{
  /* Write the per-function profiler prologue asm directly to FILE.
     LABELNO names the LP<labelno> counter label.  For ABI_AIX/DARWIN
     without -mprofile-kernel, the work is done in output_profile_hook
     instead and nothing is emitted here.  */
  char buf[100];
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      /* V4 stack frames store the saved LR at offset 4.  */
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: load the counter address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the counter address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");

      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* -mprofile-kernel is 64-bit only.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
14002
14003
static int
rs6000_use_dfa_pipeline_interface (void)
{
  /* Always use the DFA-based pipeline description for scheduling.  */
  return 1;
}
14009
14010/* Power4 load update and store update instructions are cracked into a
14011   load or store and an integer insn which are executed in the same cycle.
14012   Branches have their own dispatch slot which does not count against the
14013   GCC issue rate, but it changes the program flow so there are no other
14014   instructions to issue in this cycle.  */
14015
14016static int
14017rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14018		       int verbose ATTRIBUTE_UNUSED,
14019		       rtx insn, int more)
14020{
14021  if (GET_CODE (PATTERN (insn)) == USE
14022      || GET_CODE (PATTERN (insn)) == CLOBBER)
14023    return more;
14024
14025  if (rs6000_sched_groups)
14026    {
14027      if (is_microcoded_insn (insn))
14028        return 0;
14029      else if (is_cracked_insn (insn))
14030        return more > 2 ? more - 2 : 0;
14031    }
14032
14033  return more - 1;
14034}
14035
14036/* Adjust the cost of a scheduling dependency.  Return the new cost of
14037   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
14038
14039static int
14040rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14041		    int cost)
14042{
14043  if (! recog_memoized (insn))
14044    return 0;
14045
14046  if (REG_NOTE_KIND (link) != 0)
14047    return 0;
14048
14049  if (REG_NOTE_KIND (link) == 0)
14050    {
14051      /* Data dependency; DEP_INSN writes a register that INSN reads
14052	 some cycles later.  */
14053      switch (get_attr_type (insn))
14054	{
14055	case TYPE_JMPREG:
14056	  /* Tell the first scheduling pass about the latency between
14057	     a mtctr and bctr (and mtlr and br/blr).  The first
14058	     scheduling pass will not know about this latency since
14059	     the mtctr instruction, which has the latency associated
14060	     to it, will be generated by reload.  */
14061	  return TARGET_POWER ? 5 : 4;
14062	case TYPE_BRANCH:
14063	  /* Leave some extra cycles between a compare and its
14064	     dependent branch, to inhibit expensive mispredicts.  */
14065	  if ((rs6000_cpu_attr == CPU_PPC603
14066	       || rs6000_cpu_attr == CPU_PPC604
14067	       || rs6000_cpu_attr == CPU_PPC604E
14068	       || rs6000_cpu_attr == CPU_PPC620
14069	       || rs6000_cpu_attr == CPU_PPC630
14070	       || rs6000_cpu_attr == CPU_PPC750
14071	       || rs6000_cpu_attr == CPU_PPC7400
14072	       || rs6000_cpu_attr == CPU_PPC7450
14073	       || rs6000_cpu_attr == CPU_POWER4
14074	       || rs6000_cpu_attr == CPU_POWER5)
14075	      && recog_memoized (dep_insn)
14076	      && (INSN_CODE (dep_insn) >= 0)
14077	      && (get_attr_type (dep_insn) == TYPE_CMP
14078		  || get_attr_type (dep_insn) == TYPE_COMPARE
14079		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14080		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14081		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14082		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14083		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14084		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14085	    return cost + 2;
14086	default:
14087	  break;
14088	}
14089      /* Fall out to return default cost.  */
14090    }
14091
14092  return cost;
14093}
14094
14095/* The function returns a true if INSN is microcoded.
14096   Return false otherwise.  */
14097
14098static bool
14099is_microcoded_insn (rtx insn)
14100{
14101  if (!insn || !INSN_P (insn)
14102      || GET_CODE (PATTERN (insn)) == USE
14103      || GET_CODE (PATTERN (insn)) == CLOBBER)
14104    return false;
14105
14106  if (rs6000_sched_groups)
14107    {
14108      enum attr_type type = get_attr_type (insn);
14109      if (type == TYPE_LOAD_EXT_U
14110	  || type == TYPE_LOAD_EXT_UX
14111	  || type == TYPE_LOAD_UX
14112	  || type == TYPE_STORE_UX
14113	  || type == TYPE_MFCR)
14114        return true;
14115    }
14116
14117  return false;
14118}
14119
14120/* The function returns a nonzero value if INSN can be scheduled only
14121   as the first insn in a dispatch group ("dispatch-slot restricted").
14122   In this case, the returned value indicates how many dispatch slots
14123   the insn occupies (at the beginning of the group).
14124   Return 0 otherwise.  */
14125
14126static int
14127is_dispatch_slot_restricted (rtx insn)
14128{
14129  enum attr_type type;
14130
14131  if (!rs6000_sched_groups)
14132    return 0;
14133
14134  if (!insn
14135      || insn == NULL_RTX
14136      || GET_CODE (insn) == NOTE
14137      || GET_CODE (PATTERN (insn)) == USE
14138      || GET_CODE (PATTERN (insn)) == CLOBBER)
14139    return 0;
14140
14141  type = get_attr_type (insn);
14142
14143  switch (type)
14144    {
14145    case TYPE_MFCR:
14146    case TYPE_MFCRF:
14147    case TYPE_MTCR:
14148    case TYPE_DELAYED_CR:
14149    case TYPE_CR_LOGICAL:
14150    case TYPE_MTJMPR:
14151    case TYPE_MFJMPR:
14152      return 1;
14153    case TYPE_IDIV:
14154    case TYPE_LDIV:
14155      return 2;
14156    default:
14157      if (rs6000_cpu == PROCESSOR_POWER5
14158	  && is_cracked_insn (insn))
14159	return 2;
14160      return 0;
14161    }
14162}
14163
14164/* The function returns true if INSN is cracked into 2 instructions
14165   by the processor (and therefore occupies 2 issue slots).  */
14166
14167static bool
14168is_cracked_insn (rtx insn)
14169{
14170  if (!insn || !INSN_P (insn)
14171      || GET_CODE (PATTERN (insn)) == USE
14172      || GET_CODE (PATTERN (insn)) == CLOBBER)
14173    return false;
14174
14175  if (rs6000_sched_groups)
14176    {
14177      enum attr_type type = get_attr_type (insn);
14178      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14179	       || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14180	       || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14181	       || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14182	       || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14183	       || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14184	       || type == TYPE_IDIV || type == TYPE_LDIV
14185	       || type == TYPE_INSERT_WORD)
14186        return true;
14187    }
14188
14189  return false;
14190}
14191
14192/* The function returns true if INSN can be issued only from
14193   the branch slot.  */
14194
14195static bool
14196is_branch_slot_insn (rtx insn)
14197{
14198  if (!insn || !INSN_P (insn)
14199      || GET_CODE (PATTERN (insn)) == USE
14200      || GET_CODE (PATTERN (insn)) == CLOBBER)
14201    return false;
14202
14203  if (rs6000_sched_groups)
14204    {
14205      enum attr_type type = get_attr_type (insn);
14206      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14207	return true;
14208      return false;
14209    }
14210
14211  return false;
14212}
14213
14214/* A C statement (sans semicolon) to update the integer scheduling
14215   priority INSN_PRIORITY (INSN). Increase the priority to execute the
14216   INSN earlier, reduce the priority to execute INSN later.  Do not
14217   define this macro if you do not need to adjust the scheduling
14218   priorities of insns.  */
14219
14220static int
14221rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14222{
14223  /* On machines (like the 750) which have asymmetric integer units,
14224     where one integer unit can do multiply and divides and the other
14225     can't, reduce the priority of multiply/divide so it is scheduled
14226     before other integer operations.  */
14227
14228#if 0
14229  if (! INSN_P (insn))
14230    return priority;
14231
14232  if (GET_CODE (PATTERN (insn)) == USE)
14233    return priority;
14234
14235  switch (rs6000_cpu_attr) {
14236  case CPU_PPC750:
14237    switch (get_attr_type (insn))
14238      {
14239      default:
14240	break;
14241
14242      case TYPE_IMUL:
14243      case TYPE_IDIV:
14244	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14245		 priority, priority);
14246	if (priority >= 0 && priority < 0x01000000)
14247	  priority >>= 3;
14248	break;
14249      }
14250  }
14251#endif
14252
14253  if (is_dispatch_slot_restricted (insn)
14254      && reload_completed
14255      && current_sched_info->sched_max_insns_priority
14256      && rs6000_sched_restricted_insns_priority)
14257    {
14258
14259      /* Prioritize insns that can be dispatched only in the first dispatch slot.  */
14260      if (rs6000_sched_restricted_insns_priority == 1)
14261	/* Attach highest priority to insn. This means that in
14262	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14263	   precede 'priority' (critical path) considerations.  */
14264	return current_sched_info->sched_max_insns_priority;
14265      else if (rs6000_sched_restricted_insns_priority == 2)
14266	/* Increase priority of insn by a minimal amount. This means that in
14267	   haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14268	   precede dispatch-slot restriction considerations.  */
14269	return (priority + 1);
14270    }
14271
14272  return priority;
14273}
14274
14275/* Return how many instructions the machine can issue per cycle.  */
14276
14277static int
14278rs6000_issue_rate (void)
14279{
14280  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
14281  if (!reload_completed)
14282    return 1;
14283
14284  switch (rs6000_cpu_attr) {
14285  case CPU_RIOS1:  /* ? */
14286  case CPU_RS64A:
14287  case CPU_PPC601: /* ? */
14288  case CPU_PPC7450:
14289    return 3;
14290  case CPU_PPC440:
14291  case CPU_PPC603:
14292  case CPU_PPC750:
14293  case CPU_PPC7400:
14294  case CPU_PPC8540:
14295    return 2;
14296  case CPU_RIOS2:
14297  case CPU_PPC604:
14298  case CPU_PPC604E:
14299  case CPU_PPC620:
14300  case CPU_PPC630:
14301    return 4;
14302  case CPU_POWER4:
14303  case CPU_POWER5:
14304    return 5;
14305  default:
14306    return 1;
14307  }
14308}
14309
14310/* Return how many instructions to look ahead for better insn
14311   scheduling.  */
14312
14313static int
14314rs6000_use_sched_lookahead (void)
14315{
14316  if (rs6000_cpu_attr == CPU_PPC8540)
14317    return 4;
14318  return 0;
14319}
14320
/* Determine if PAT refers to memory.  */
14322
14323static bool
14324is_mem_ref (rtx pat)
14325{
14326  const char * fmt;
14327  int i, j;
14328  bool ret = false;
14329
14330  if (GET_CODE (pat) == MEM)
14331    return true;
14332
14333  /* Recursively process the pattern.  */
14334  fmt = GET_RTX_FORMAT (GET_CODE (pat));
14335
14336  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14337    {
14338      if (fmt[i] == 'e')
14339	ret |= is_mem_ref (XEXP (pat, i));
14340      else if (fmt[i] == 'E')
14341	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14342	  ret |= is_mem_ref (XVECEXP (pat, i, j));
14343    }
14344
14345  return ret;
14346}
14347
14348/* Determine if PAT is a PATTERN of a load insn.  */
14349
14350static bool
14351is_load_insn1 (rtx pat)
14352{
14353  if (!pat || pat == NULL_RTX)
14354    return false;
14355
14356  if (GET_CODE (pat) == SET)
14357    return is_mem_ref (SET_SRC (pat));
14358
14359  if (GET_CODE (pat) == PARALLEL)
14360    {
14361      int i;
14362
14363      for (i = 0; i < XVECLEN (pat, 0); i++)
14364	if (is_load_insn1 (XVECEXP (pat, 0, i)))
14365	  return true;
14366    }
14367
14368  return false;
14369}
14370
14371/* Determine if INSN loads from memory.  */
14372
14373static bool
14374is_load_insn (rtx insn)
14375{
14376  if (!insn || !INSN_P (insn))
14377    return false;
14378
14379  if (GET_CODE (insn) == CALL_INSN)
14380    return false;
14381
14382  return is_load_insn1 (PATTERN (insn));
14383}
14384
14385/* Determine if PAT is a PATTERN of a store insn.  */
14386
14387static bool
14388is_store_insn1 (rtx pat)
14389{
14390  if (!pat || pat == NULL_RTX)
14391    return false;
14392
14393  if (GET_CODE (pat) == SET)
14394    return is_mem_ref (SET_DEST (pat));
14395
14396  if (GET_CODE (pat) == PARALLEL)
14397    {
14398      int i;
14399
14400      for (i = 0; i < XVECLEN (pat, 0); i++)
14401	if (is_store_insn1 (XVECEXP (pat, 0, i)))
14402	  return true;
14403    }
14404
14405  return false;
14406}
14407
14408/* Determine if INSN stores to memory.  */
14409
14410static bool
14411is_store_insn (rtx insn)
14412{
14413  if (!insn || !INSN_P (insn))
14414    return false;
14415
14416  return is_store_insn1 (PATTERN (insn));
14417}
14418
14419/* Returns whether the dependence between INSN and NEXT is considered
14420   costly by the given target.  */
14421
static bool
rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
{
  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  /* REG_NOTE_KIND of 0 denotes a true (read-after-write) dependence;
     a null LINK is treated the same way.  */
  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && (!link || (int) REG_NOTE_KIND (link) == 0))
     /* Prevent load after store in the same group if it is a true dependence.  */
     return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  COST less the DISTANCE
     already separating the insns is the remaining latency.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
14458
14459/* Return the next insn after INSN that is found before TAIL is reached,
14460   skipping any "non-active" insns - insns that will not actually occupy
14461   an issue slot.  Return NULL_RTX if such an insn is not found.  */
14462
14463static rtx
14464get_next_active_insn (rtx insn, rtx tail)
14465{
14466  rtx next_insn;
14467
14468  if (!insn || insn == tail)
14469    return NULL_RTX;
14470
14471  next_insn = NEXT_INSN (insn);
14472
14473  while (next_insn
14474  	 && next_insn != tail
14475	 && (GET_CODE(next_insn) == NOTE
14476	     || GET_CODE (PATTERN (next_insn)) == USE
14477	     || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14478    {
14479      next_insn = NEXT_INSN (next_insn);
14480    }
14481
14482  if (!next_insn || next_insn == tail)
14483    return NULL_RTX;
14484
14485  return next_insn;
14486}
14487
14488/* Return whether the presence of INSN causes a dispatch group termination
14489   of group WHICH_GROUP.
14490
14491   If WHICH_GROUP == current_group, this function will return true if INSN
14492   causes the termination of the current group (i.e, the dispatch group to
14493   which INSN belongs). This means that INSN will be the last insn in the
14494   group it belongs to.
14495
14496   If WHICH_GROUP == previous_group, this function will return true if INSN
14497   causes the termination of the previous group (i.e, the dispatch group that
14498   precedes the group to which INSN belongs).  This means that INSN will be
14499   the first insn in the group it belongs to).  */
14500
14501static bool
14502insn_terminates_group_p (rtx insn, enum group_termination which_group)
14503{
14504  enum attr_type type;
14505
14506  if (! insn)
14507    return false;
14508
14509  type = get_attr_type (insn);
14510
14511  if (is_microcoded_insn (insn))
14512    return true;
14513
14514  if (which_group == current_group)
14515    {
14516      if (is_branch_slot_insn (insn))
14517        return true;
14518      return false;
14519    }
14520  else if (which_group == previous_group)
14521    {
14522      if (is_dispatch_slot_restricted (insn))
14523        return true;
14524      return false;
14525    }
14526
14527  return false;
14528}
14529
14530/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14531   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */
14532
14533static bool
14534is_costly_group (rtx *group_insns, rtx next_insn)
14535{
14536  int i;
14537  rtx link;
14538  int cost;
14539  int issue_rate = rs6000_issue_rate ();
14540
14541  for (i = 0; i < issue_rate; i++)
14542    {
14543      rtx insn = group_insns[i];
14544      if (!insn)
14545        continue;
14546      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14547        {
14548          rtx next = XEXP (link, 0);
14549          if (next == next_insn)
14550            {
14551              cost = insn_cost (insn, link, next_insn);
14552              if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14553                return true;
14554            }
14555        }
14556    }
14557
14558  return false;
14559}
14560
14561/* Utility of the function redefine_groups.
14562   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14563   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
14564   to keep it "far" (in a separate group) from GROUP_INSNS, following
14565   one of the following schemes, depending on the value of the flag
14566   -minsert_sched_nops = X:
14567   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14568       in order to force NEXT_INSN into a separate group.
14569   (2) X < sched_finish_regroup_exact: insert exactly X nops.
14570   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14571   insertion (has a group just ended, how many vacant issue slots remain in the
14572   last group, and how many dispatch groups were encountered so far).  */
14573
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
		 bool *group_end, int can_issue_more, int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact request no nop insertion.  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
			*group_count ,can_issue_more);

  /* Scheme (1): insert exactly as many nops as needed to push NEXT_INSN
     into its own dispatch group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
        can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch will be
	 forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
        can_issue_more--;

      while (can_issue_more > 0)
        {
          nop = gen_nop();
          emit_insn_before (nop, next_insn);
          can_issue_more--;
        }

      *group_end = true;
      return 0;
    }

  /* Scheme (2): the flag value itself is the fixed number of nops to
     insert, regardless of whether that pushes NEXT_INSN out.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
         issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
        {
          /* The current group just filled up: start a fresh one.  */
          can_issue_more = issue_rate - 1;
          (*group_count)++;
          end = true;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      while (n_nops > 0)
        {
          nop = gen_nop ();
          emit_insn_before (nop, next_insn);
          if (can_issue_more == issue_rate - 1) /* new group begins */
            end = false;
          can_issue_more--;
          if (can_issue_more == 0)
            {
              can_issue_more = issue_rate - 1;
              (*group_count)++;
              end = true;
              for (i = 0; i < issue_rate; i++)
                {
                  group_insns[i] = 0;
                }
            }
          n_nops--;
        }

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      *group_end = /* Is next_insn going to start a new group?  */
	  (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	      insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
        (*group_count)--;

      if (sched_verbose > 6)
        fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
			*group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
14682
14683/* This function tries to synch the dispatch groups that the compiler "sees"
14684   with the dispatch groups that the processor dispatcher is expected to
14685   form in practice.  It tries to achieve this synchronization by forcing the
14686   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the processor).
14688
14689   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14690   examines the (estimated) dispatch groups that will be formed by the processor
14691   dispatcher.  It marks these group boundaries to reflect the estimated
14692   processor grouping, overriding the grouping that the scheduler had marked.
14693   Depending on the value of the flag '-minsert-sched-nops' this function can
14694   force certain insns into separate groups or force a certain distance between
14695   them by inserting nops, for example, if there exists a "costly dependence"
14696   between the insns.
14697
14698   The function estimates the group boundaries that the processor will form as
   follows:  It keeps track of how many vacant issue slots are available after
14700   each insn.  A subsequent insn will start a new group if one of the following
14701   4 cases applies:
14702   - no more vacant issue slots remain in the current dispatch group.
14703   - only the last issue slot, which is the branch slot, is vacant, but the next
14704     insn is not a branch.
14705   - only the last 2 or less issue slots, including the branch slot, are vacant,
14706     which means that a cracked insn (which occupies two issue slots) can't be
14707     issued in this group.
14708   - less than 'issue_rate' slots are vacant, and the next insn always needs to
14709     start a new group.  */
14710
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in the slot it occupies within the current group.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
        rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
        can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
        return group_count + 1;

      /* See the 4 boundary cases described in the function comment.  */
      group_end = /* Is next_insn going to start a new group?  */
        (can_issue_more == 0
         || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
         || (can_issue_more <= 2 && is_cracked_insn (next_insn))
         || (can_issue_more < issue_rate &&
             insn_terminates_group_p (next_insn, previous_group)));

      /* Possibly emit nops to keep NEXT_INSN away from a costly group.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
			next_insn, &group_end, can_issue_more, &group_count);

      if (group_end)
        {
          group_count++;
          can_issue_more = 0;
          for (i = 0; i < issue_rate; i++)
            {
              group_insns[i] = 0;
            }
        }

      /* Re-mark the group boundary: the scheduler uses TImode on an insn
	 to mean "starts a new dispatch group"; override its marking with
	 the grouping estimated here.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
        PUT_MODE(next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
        PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
        can_issue_more = issue_rate;
   } /* while */

  return group_count;
}
14779
14780/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14781   dispatch group boundaries that the scheduler had marked.  Pad with nops
14782   any dispatch groups which have vacant issue slots, in order to force the
14783   scheduler's grouping on the processor dispatcher.  The function
14784   returns the number of dispatch groups found.  */
14785
static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
      	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* TImode on NEXT_INSN is the scheduler's mark for the start of a
	 new dispatch group.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
        break;

      if (group_end)
        {
          /* If the scheduler had marked group termination at this location
             (between insn and next_insn), and neither insn nor next_insn will
             force group termination, pad the group with nops to force group
             termination.  */
          if (can_issue_more
              && (rs6000_sched_insert_nops == sched_finish_pad_groups)
              && !insn_terminates_group_p (insn, current_group)
              && !insn_terminates_group_p (next_insn, previous_group))
            {
              /* A non-branch NEXT_INSN cannot use the branch slot, so one
		 fewer nop is needed to fill the group.  */
              if (!is_branch_slot_insn(next_insn))
                can_issue_more--;

              while (can_issue_more)
                {
                  nop = gen_nop ();
                  emit_insn_before (nop, next_insn);
                  can_issue_more--;
                }
            }

          can_issue_more = issue_rate;
          group_count++;
        }

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
14845
14846/* The following function is called at the end of scheduling BB.
14847   After reload, it inserts nops at insn group bundling.  */
14848
14849static void
14850rs6000_sched_finish (FILE *dump, int sched_verbose)
14851{
14852  int n_groups;
14853
14854  if (sched_verbose)
14855    fprintf (dump, "=== Finishing schedule.\n");
14856
14857  if (reload_completed && rs6000_sched_groups)
14858    {
14859      if (rs6000_sched_insert_nops == sched_finish_none)
14860        return;
14861
14862      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14863        n_groups = pad_groups (dump, sched_verbose,
14864				current_sched_info->prev_head,
14865  			   	current_sched_info->next_tail);
14866      else
14867        n_groups = redefine_groups (dump, sched_verbose,
14868				current_sched_info->prev_head,
14869  				current_sched_info->next_tail);
14870
14871      if (sched_verbose >= 6)
14872	{
14873    	  fprintf (dump, "ngroups = %d\n", n_groups);
14874	  print_rtl (dump, current_sched_info->prev_head);
14875	  fprintf (dump, "Done finish_sched\n");
14876	}
14877    }
14878}
14879
14880/* Length in units of the trampoline for entering a nested function.  */
14881
14882int
14883rs6000_trampoline_size (void)
14884{
14885  int ret = 0;
14886
14887  switch (DEFAULT_ABI)
14888    {
14889    default:
14890      abort ();
14891
14892    case ABI_AIX:
14893      ret = (TARGET_32BIT) ? 12 : 24;
14894      break;
14895
14896    case ABI_DARWIN:
14897    case ABI_V4:
14898      ret = (TARGET_32BIT) ? 40 : 48;
14899      break;
14900    }
14901
14902  return ret;
14903}
14904
14905/* Emit RTL insns to initialize the variable parts of a trampoline.
14906   FNADDR is an RTX for the address of the function's pure code.
14907   CXT is an RTX for the static chain value for the function.  */
14908
void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  /* Width in bytes of one pointer-sized slot.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the entry point and TOC pointer out of FNADDR's
	   descriptor, then store them plus the static chain into the
	   trampoline's descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
14953
14954
14955/* Table of valid machine attributes.  */
14956
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "altivec" takes exactly one argument and applies to types.  */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  /* "longcall" and "shortcall" take no arguments, apply to function
     types, and share one handler (it only validates placement).  */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Terminating sentinel entry.  */
  { NULL,        0, 0, false, false, false, NULL }
};
14965
14966/* Handle the "altivec" attribute.  The attribute may have
14967   arguments as follows:
14968
14969       __attribute__((altivec(vector__)))
14970       __attribute__((altivec(pixel__)))       (always followed by 'unsigned short')
14971       __attribute__((altivec(bool__)))        (always followed by 'unsigned')
14972
14973  and may appear more than once (e.g., 'vector bool char') in a
14974  given declaration.  */
14975
static tree
rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the argument identifier selects the flavor:
     'v' (vector__), 'b' (bool__) or 'p' (pixel__); '?' if absent.  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
       && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointers, function/method types and arrays down to the
     element type the attribute actually applies to.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  if (rs6000_warn_altivec_long
      && (type == long_unsigned_type_node || type == long_integer_type_node))
    warning ("use of 'long' in AltiVec types is deprecated; use 'int'");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TREE_UNSIGNED (type);
      switch (mode)
	{
	  case SImode:
	    result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	    break;
	  case HImode:
	    result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	    break;
	  case QImode:
	    result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	    break;
	  case SFmode: result = V4SF_type_node; break;
	    /* If the user says 'vector int bool', we may be handed the 'bool'
	       attribute _before_ the 'vector' attribute, and so select the proper
	       type in the 'b' case below.  */
	  case V4SImode: case V8HImode: case V16QImode: result = type;
	    /* Fall through.  */
	  default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	  case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	  case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	  case QImode: case V16QImode: result = bool_V16QI_type_node;
	    /* Fall through.  */
	  default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	  case V8HImode: result = pixel_V8HI_type_node;
	    /* Fall through.  */
	  default: break;
	}
      /* Fall through.  */
    default: break;
    }

  /* Propagate qualifiers attached to the element type.  */
  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (!result)
    warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
  else
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
15055
15056/* AltiVec defines four built-in scalar types that serve as vector
15057   elements; we must teach the compiler how to mangle them.  */
15058
15059static const char *
15060rs6000_mangle_fundamental_type (tree type)
15061{
15062  if (type == bool_char_type_node) return "U6__boolc";
15063  if (type == bool_short_type_node) return "U6__bools";
15064  if (type == pixel_type_node) return "u7__pixel";
15065  if (type == bool_int_type_node) return "U6__booli";
15066
15067  /* For all other types, use normal C++ mangling.  */
15068  return NULL;
15069}
15070
15071/* Handle a "longcall" or "shortcall" attribute; arguments as in
15072   struct attribute_spec.handler.  */
15073
15074static tree
15075rs6000_handle_longcall_attribute (tree *node, tree name,
15076				  tree args ATTRIBUTE_UNUSED,
15077				  int flags ATTRIBUTE_UNUSED,
15078				  bool *no_add_attrs)
15079{
15080  if (TREE_CODE (*node) != FUNCTION_TYPE
15081      && TREE_CODE (*node) != FIELD_DECL
15082      && TREE_CODE (*node) != TYPE_DECL)
15083    {
15084      warning ("`%s' attribute only applies to functions",
15085	       IDENTIFIER_POINTER (name));
15086      *no_add_attrs = true;
15087    }
15088
15089  return NULL_TREE;
15090}
15091
15092/* Set longcall attributes on all functions declared when
15093   rs6000_default_long_calls is true.  */
15094static void
15095rs6000_set_default_type_attributes (tree type)
15096{
15097  if (rs6000_default_long_calls
15098      && (TREE_CODE (type) == FUNCTION_TYPE
15099	  || TREE_CODE (type) == METHOD_TYPE))
15100    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15101					NULL_TREE,
15102					TYPE_ATTRIBUTES (type));
15103}
15104
15105/* Return a reference suitable for calling a function with the
15106   longcall attribute.  */
15107
15108struct rtx_def *
15109rs6000_longcall_ref (rtx call_ref)
15110{
15111  const char *call_name;
15112  tree node;
15113
15114  if (GET_CODE (call_ref) != SYMBOL_REF)
15115    return call_ref;
15116
15117  /* System V adds '.' to the internal name, so skip them.  */
15118  call_name = XSTR (call_ref, 0);
15119  if (*call_name == '.')
15120    {
15121      while (*call_name == '.')
15122	call_name++;
15123
15124      node = get_identifier (call_name);
15125      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15126    }
15127
15128  return force_reg (Pmode, call_ref);
15129}
15130
15131#ifdef USING_ELFOS_H
15132
15133/* A C statement or statements to switch to the appropriate section
15134   for output of RTX in mode MODE.  You can assume that RTX is some
15135   kind of constant in RTL.  The argument MODE is redundant except in
15136   the case of a `const_int' rtx.  Select the section by calling
15137   `text_section' or one of the alternatives for other sections.
15138
15139   Do not define this macro if you put all constants in the read-only
15140   data section.  */
15141
15142static void
15143rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15144			       unsigned HOST_WIDE_INT align)
15145{
15146  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15147    toc_section ();
15148  else
15149    default_elf_select_rtx_section (mode, x, align);
15150}
15151
15152/* A C statement or statements to switch to the appropriate
15153   section for output of DECL.  DECL is either a `VAR_DECL' node
15154   or a constant of some sort.  RELOC indicates whether forming
15155   the initial value of DECL requires link-time relocations.  */
15156
15157static void
15158rs6000_elf_select_section (tree decl, int reloc,
15159			   unsigned HOST_WIDE_INT align)
15160{
15161  /* Pretend that we're always building for a shared library when
15162     ABI_AIX, because otherwise we end up with dynamic relocations
15163     in read-only sections.  This happens for function pointers,
15164     references to vtables in typeinfo, and probably other cases.  */
15165  default_elf_select_section_1 (decl, reloc, align,
15166				flag_pic || DEFAULT_ABI == ABI_AIX);
15167}
15168
15169/* A C statement to build up a unique section name, expressed as a
15170   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15171   RELOC indicates whether the initial value of EXP requires
15172   link-time relocations.  If you do not define this macro, GCC will use
15173   the symbol name prefixed by `.' as the section name.  Note - this
15174   macro can now be called for uninitialized data items as well as
15175   initialized data and functions.  */
15176
15177static void
15178rs6000_elf_unique_section (tree decl, int reloc)
15179{
15180  /* As above, pretend that we're always building for a shared library
15181     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
15182  default_unique_section_1 (decl, reloc,
15183			    flag_pic || DEFAULT_ABI == ABI_AIX);
15184}
15185
15186/* For a SYMBOL_REF, set generic flags and then perform some
15187   target-specific processing.
15188
15189   When the AIX ABI is requested on a non-AIX system, replace the
15190   function name with the real name (with a leading .) rather than the
15191   function descriptor name.  This saves a lot of overriding code to
15192   read the prefixes.  */
15193
15194static void
15195rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15196{
15197  default_encode_section_info (decl, rtl, first);
15198
15199  if (first
15200      && TREE_CODE (decl) == FUNCTION_DECL
15201      && !TARGET_AIX
15202      && DEFAULT_ABI == ABI_AIX)
15203    {
15204      rtx sym_ref = XEXP (rtl, 0);
15205      size_t len = strlen (XSTR (sym_ref, 0));
15206      char *str = alloca (len + 2);
15207      str[0] = '.';
15208      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15209      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15210    }
15211}
15212
15213static bool
15214rs6000_elf_in_small_data_p (tree decl)
15215{
15216  if (rs6000_sdata == SDATA_NONE)
15217    return false;
15218
15219  /* We want to merge strings, so we never consider them small data.  */
15220  if (TREE_CODE (decl) == STRING_CST)
15221    return false;
15222
15223  /* Functions are never in the small data area.  */
15224  if (TREE_CODE (decl) == FUNCTION_DECL)
15225    return false;
15226
15227  /* Thread-local vars can't go in the small data area.  */
15228  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
15229    return false;
15230
15231  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15232    {
15233      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15234      if (strcmp (section, ".sdata") == 0
15235	  || strcmp (section, ".sdata2") == 0
15236	  || strcmp (section, ".sbss") == 0
15237	  || strcmp (section, ".sbss2") == 0
15238	  || strcmp (section, ".PPC.EMB.sdata0") == 0
15239	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
15240	return true;
15241    }
15242  else
15243    {
15244      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15245
15246      if (size > 0
15247	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
15248	  /* If it's not public, and we're not going to reference it there,
15249	     there's no need to put it in the small data section.  */
15250	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15251	return true;
15252    }
15253
15254  return false;
15255}
15256
15257#endif /* USING_ELFOS_H */
15258
15259
15260/* Return a REG that occurs in ADDR with coefficient 1.
15261   ADDR can be effectively incremented by incrementing REG.
15262
15263   r0 is special and we must not select it as an address
15264   register by this routine since our caller will try to
15265   increment the returned register via an "la" instruction.  */
15266
15267struct rtx_def *
15268find_addr_reg (rtx addr)
15269{
15270  while (GET_CODE (addr) == PLUS)
15271    {
15272      if (GET_CODE (XEXP (addr, 0)) == REG
15273	  && REGNO (XEXP (addr, 0)) != 0)
15274	addr = XEXP (addr, 0);
15275      else if (GET_CODE (XEXP (addr, 1)) == REG
15276	       && REGNO (XEXP (addr, 1)) != 0)
15277	addr = XEXP (addr, 1);
15278      else if (CONSTANT_P (XEXP (addr, 0)))
15279	addr = XEXP (addr, 1);
15280      else if (CONSTANT_P (XEXP (addr, 1)))
15281	addr = XEXP (addr, 0);
15282      else
15283	abort ();
15284    }
15285  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15286    return addr;
15287  abort ();
15288}
15289
/* Report a fatal "bad address" error, quoting insn/operand OP.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
15295
15296#if TARGET_MACHO
15297
15298#if 0
15299/* Returns 1 if OP is either a symbol reference or a sum of a symbol
15300   reference and a constant.  */
15301
15302int
15303symbolic_operand (rtx op)
15304{
15305  switch (GET_CODE (op))
15306    {
15307    case SYMBOL_REF:
15308    case LABEL_REF:
15309      return 1;
15310    case CONST:
15311      op = XEXP (op, 0);
15312      return (GET_CODE (op) == SYMBOL_REF ||
15313	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15314	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15315	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
15316    default:
15317      return 0;
15318    }
15319}
15320#endif
15321
15322#if TARGET_MACHO
15323
15324static tree branch_island_list = 0;
15325
15326/* Remember to generate a branch island for far calls to the given
15327   function.  */
15328
static void
add_compiler_branch_island (tree label_name, tree function_name, int line_number)
{
  /* Push a (FUNCTION_NAME -> LABEL_NAME) pair onto the head of
     branch_island_list.  The source LINE_NUMBER is stashed in the
     node's TREE_TYPE slot as an INTEGER_CST; see the
     BRANCH_ISLAND_LINE_NUMBER accessor below.  */
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
15337
15338#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
15339#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
15340#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
15341		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15342
15343/* Generate far-jump branch islands for everything on the
15344   branch_island_list.  Invoked immediately after the last instruction
15345   of the epilogue has been emitted; the branch-islands must be
15346   appended to, and contiguous with, the function body.  Mach-O stubs
15347   are generated in machopic_output_stub().  */
15348
static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  /* Emit one island per recorded entry, then clear the list.  */
  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	darwin_strip_name_encoding (
	  IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      /* Accumulate the island's assembly text in tmp_buf, beginning
	 with a newline and the island's label.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER(branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: obtain the current address with bcl/mflr into
	     r11 (preserving LR via r0), form the target address
	     relative to the local "_pic" label with addis/addi, and
	     jump through CTR.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute target address with
	     lis/ori and jump through CTR.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  branch_island_list = 0;
}
15422
15423/* NO_PREVIOUS_DEF checks in the link list whether the function name is
15424   already there or not.  */
15425
15426static int
15427no_previous_def (tree function_name)
15428{
15429  tree branch_island;
15430  for (branch_island = branch_island_list;
15431       branch_island;
15432       branch_island = TREE_CHAIN (branch_island))
15433    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15434      return 0;
15435  return 1;
15436}
15437
15438/* GET_PREV_LABEL gets the label name from the previous definition of
15439   the function.  */
15440
15441static tree
15442get_prev_label (tree function_name)
15443{
15444  tree branch_island;
15445  for (branch_island = branch_island_list;
15446       branch_island;
15447       branch_island = TREE_CHAIN (branch_island))
15448    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15449      return BRANCH_ISLAND_LABEL_NAME (branch_island);
15450  return 0;
15451}
15452
15453/* INSN is either a function call or a millicode call.  It may have an
15454   unconditional jump in its delay slot.
15455
15456   CALL_DEST is the routine we are calling.  */
15457
char *
output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
{
  static char buf[256];
  /* A long call to a named function goes through a "jbsr" branch
     island; everything else is a plain "bl".  */
  if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  /* First long call to this function: invent a local label for
	     the island and record it for macho_branch_islands().  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Drop the '*' "no prefix" marker if present.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the nearest NOTE to pick up a source line
	     number for the island's debug stab.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
15497
15498#endif /* TARGET_MACHO */
15499
15500/* Generate PIC and indirect symbol stubs.  */
15501
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Monotonic counter making each stub's "$spb" label unique.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);


  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  /* -fPIC stubs go into a different section than non-PIC ones.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: find the current address with bcl/mflr (saving LR in
	 r0), address the lazy pointer relative to the local $spb
	 label, load it with lwzu, and jump through CTR.  */
      label++;
      local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
   {
     /* Non-PIC stub: address the lazy pointer absolutely.  */
     fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
     fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
     fprintf (file, "\tmtctr r12\n");
     fprintf (file, "\tbctr\n");
   }

  /* Emit the lazy pointer itself, initially pointing at the dynamic
     linker's binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
15560
15561/* Legitimize PIC addresses.  If the address is already
15562   position-independent, we return ORIG.  Newly generated
15563   position-independent addresses go into a reg.  This is REG if non
15564   zero, otherwise we allocate register(s) as necessary.  */
15565
/* True iff X fits in a signed 16-bit displacement.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  /* Allocate a scratch register unless reload forbids new pseudos.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already legitimate.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Use a different reg for the intermediate value, as
	     it will be marked UNCHANGING.  */
	  rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);

	  /* Legitimize the two addends separately.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg_temp);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot force the offset into a
		 register, so spill the whole constant to memory.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
15617
15618/* This is just a placeholder to make linking work without having to
15619   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
15620   ever needed for Darwin (not too likely!) this would have to get a
15621   real definition.  */
15622
void
toc_section (void)
{
  /* Deliberately empty: this is only a link-time placeholder for
     Darwin, as explained in the comment above.  */
}
15627
15628#endif /* TARGET_MACHO */
15629
15630#if TARGET_ELF
15631static unsigned int
15632rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15633{
15634  return default_section_type_flags_1 (decl, name, reloc,
15635				       flag_pic || DEFAULT_ABI == ABI_AIX);
15636}
15637
15638/* Record an element in the table of global constructors.  SYMBOL is
15639   a SYMBOL_REF of the function to be called; PRIORITY is a number
15640   between 0 and MAX_INIT_PRIORITY.
15641
15642   This differs from default_named_section_asm_out_constructor in
15643   that we have special handling for -mrelocatable.  */
15644
15645static void
15646rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15647{
15648  const char *section = ".ctors";
15649  char buf[16];
15650
15651  if (priority != DEFAULT_INIT_PRIORITY)
15652    {
15653      sprintf (buf, ".ctors.%.5u",
15654               /* Invert the numbering so the linker puts us in the proper
15655                  order; constructors are run from right to left, and the
15656                  linker sorts in increasing order.  */
15657               MAX_INIT_PRIORITY - priority);
15658      section = buf;
15659    }
15660
15661  named_section_flags (section, SECTION_WRITE);
15662  assemble_align (POINTER_SIZE);
15663
15664  if (TARGET_RELOCATABLE)
15665    {
15666      fputs ("\t.long (", asm_out_file);
15667      output_addr_const (asm_out_file, symbol);
15668      fputs (")@fixup\n", asm_out_file);
15669    }
15670  else
15671    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15672}
15673
15674static void
15675rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15676{
15677  const char *section = ".dtors";
15678  char buf[16];
15679
15680  if (priority != DEFAULT_INIT_PRIORITY)
15681    {
15682      sprintf (buf, ".dtors.%.5u",
15683               /* Invert the numbering so the linker puts us in the proper
15684                  order; constructors are run from right to left, and the
15685                  linker sorts in increasing order.  */
15686               MAX_INIT_PRIORITY - priority);
15687      section = buf;
15688    }
15689
15690  named_section_flags (section, SECTION_WRITE);
15691  assemble_align (POINTER_SIZE);
15692
15693  if (TARGET_RELOCATABLE)
15694    {
15695      fputs ("\t.long (", asm_out_file);
15696      output_addr_const (asm_out_file, symbol);
15697      fputs (")@fixup\n", asm_out_file);
15698    }
15699  else
15700    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15701}
15702
15703void
15704rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15705{
15706  if (TARGET_64BIT)
15707    {
15708      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15709      ASM_OUTPUT_LABEL (file, name);
15710      fputs (DOUBLE_INT_ASM_OP, file);
15711      putc ('.', file);
15712      assemble_name (file, name);
15713      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15714      assemble_name (file, name);
15715      fputs (",24\n\t.type\t.", file);
15716      assemble_name (file, name);
15717      fputs (",@function\n", file);
15718      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15719	{
15720	  fputs ("\t.globl\t.", file);
15721	  assemble_name (file, name);
15722	  putc ('\n', file);
15723	}
15724      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15725      putc ('.', file);
15726      ASM_OUTPUT_LABEL (file, name);
15727      return;
15728    }
15729
15730  if (TARGET_RELOCATABLE
15731      && (get_pool_size () != 0 || current_function_profile)
15732      && uses_TOC ())
15733    {
15734      char buf[256];
15735
15736      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15737
15738      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15739      fprintf (file, "\t.long ");
15740      assemble_name (file, buf);
15741      putc ('-', file);
15742      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15743      assemble_name (file, buf);
15744      putc ('\n', file);
15745    }
15746
15747  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15748  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15749
15750  if (DEFAULT_ABI == ABI_AIX)
15751    {
15752      const char *desc_name, *orig_name;
15753
15754      orig_name = (*targetm.strip_name_encoding) (name);
15755      desc_name = orig_name;
15756      while (*desc_name == '.')
15757	desc_name++;
15758
15759      if (TREE_PUBLIC (decl))
15760	fprintf (file, "\t.globl %s\n", desc_name);
15761
15762      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15763      fprintf (file, "%s:\n", desc_name);
15764      fprintf (file, "\t.long %s\n", orig_name);
15765      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15766      if (DEFAULT_ABI == ABI_AIX)
15767	fputs ("\t.long 0\n", file);
15768      fprintf (file, "\t.previous\n");
15769    }
15770  ASM_OUTPUT_LABEL (file, name);
15771}
15772
15773static void
15774rs6000_elf_end_indicate_exec_stack (void)
15775{
15776  if (TARGET_32BIT)
15777    file_end_indicate_exec_stack ();
15778}
15779#endif
15780
15781#if TARGET_XCOFF
static void
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
{
  /* Emit the globalizing directive for NAME, writing the base name via
     RS6000_OUTPUT_BASENAME.  */
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
15789
15790static void
15791rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15792{
15793  int smclass;
15794  static const char * const suffix[3] = { "PR", "RO", "RW" };
15795
15796  if (flags & SECTION_CODE)
15797    smclass = 0;
15798  else if (flags & SECTION_WRITE)
15799    smclass = 2;
15800  else
15801    smclass = 1;
15802
15803  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15804	   (flags & SECTION_CODE) ? "." : "",
15805	   name, suffix[smclass], flags & SECTION_ENTSIZE);
15806}
15807
15808static void
15809rs6000_xcoff_select_section (tree decl, int reloc,
15810			    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15811{
15812  if (decl_readonly_section_1 (decl, reloc, 1))
15813    {
15814      if (TREE_PUBLIC (decl))
15815        read_only_data_section ();
15816      else
15817        read_only_private_data_section ();
15818    }
15819  else
15820    {
15821      if (TREE_PUBLIC (decl))
15822        data_section ();
15823      else
15824        private_data_section ();
15825    }
15826}
15827
15828static void
15829rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15830{
15831  const char *name;
15832
15833  /* Use select_section for private and uninitialized data.  */
15834  if (!TREE_PUBLIC (decl)
15835      || DECL_COMMON (decl)
15836      || DECL_INITIAL (decl) == NULL_TREE
15837      || DECL_INITIAL (decl) == error_mark_node
15838      || (flag_zero_initialized_in_bss
15839	  && initializer_zerop (DECL_INITIAL (decl))))
15840    return;
15841
15842  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15843  name = (*targetm.strip_name_encoding) (name);
15844  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15845}
15846
15847/* Select section for constant in constant pool.
15848
15849   On RS/6000, all constants are in the private read-only data area.
15850   However, if this is being placed in the TOC it must be output as a
15851   toc entry.  */
15852
15853static void
15854rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15855				unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15856{
15857  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15858    toc_section ();
15859  else
15860    read_only_private_data_section ();
15861}
15862
15863/* Remove any trailing [DS] or the like from the symbol name.  */
15864
static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  /* Strip a leading '*' marker and any trailing "[XX]" storage-mapping
     suffix (e.g. "[DS]") from NAME, returning a GC-allocated copy when
     a suffix is removed.  The original indexed name[len - 1] without
     checking for an empty string, which is undefined behavior; guard
     against len == 0.  */
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  if (len > 0 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
15877
15878/* Section attributes.  AIX is always PIC.  */
15879
static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  /* AIX is always PIC, so pass shlib = 1 to the generic hook.  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Stash log2 of the alignment in the SECTION_ENTSIZE bits; the
     .csect output routine reads it back from there.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
15897
15898/* Output at beginning of assembler file.
15899
15900   Initialize the section names for the RS/6000 at this point.
15901
15902   Specify filename, including full path, to assembler.
15903
15904   We want to go into the TOC section so at least one .toc will be emitted.
15905   Also, in order to output proper .bs/.es pairs, we need at least one static
15906   [RW] section emitted.
15907
15908   Finally, declare mcount when profiling to make the assembler happy.  */
15909
static void
rs6000_xcoff_file_start (void)
{
  /* Derive the per-file XCOFF section names from the input filename.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Enter the TOC so at least one .toc is emitted, then (when
     debugging) a static [RW] section so .bs/.es pairs are valid, and
     finally the text section (see the comment above).  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  /* Declare mcount when profiling to make the assembler happy.  */
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
15931
15932/* Output at end of assembler file.
15933   On the RS/6000, referencing data should automatically pull in text.  */
15934
static void
rs6000_xcoff_file_end (void)
{
  /* Define "_section_.text" in the text section and emit a pointer to
     it from the data section, so referencing data automatically pulls
     in text.  The pointer is .long or .llong depending on word
     size.  */
  text_section ();
  fputs ("_section_.text:\n", asm_out_file);
  data_section ();
  fputs (TARGET_32BIT
	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
	 asm_out_file);
}
15945#endif /* TARGET_XCOFF */
15946
15947#if TARGET_MACHO
15948/* Cross-module name binding.  Darwin does not support overriding
15949   functions at dynamic-link time.  */
15950
static bool
rs6000_binds_local_p (tree decl)
{
  /* shlib_or_pic == 0: Darwin does not support overriding symbols at
     dynamic-link time (see the comment above), so defined symbols
     always bind locally.  */
  return default_binds_local_p_1 (decl, 0);
}
15956#endif
15957
15958/* Compute a (partial) cost for rtx X.  Return true if the complete
15959   cost has been computed, and false if subexpressions should be
15960   scanned.  In either case, *TOTAL contains the cost result.  */
15961
static bool
rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
		  int *total)
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* An add of a constant that fits neither addi (low 16 bits)
	 nor addis (high 16 bits only) needs two instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Likewise: a logical constant with bits in both halves needs
	 the immediate and the shifted-immediate form.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      /* Per-CPU multiply latencies; several CPUs are cheaper when the
	 second operand is a small (8-bit) constant.  */
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_PPC440:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (3)
		    : COSTS_N_INSNS (2));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by a power of two is a shift sequence.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-CPU divide latencies.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC440:
	  *total = COSTS_N_INSNS (34);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	case PROCESSOR_POWER5:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)).  */
      *total = 5;
      return true;

    default:
      return false;
    }
}
16182
16183/* A C expression returning the cost of moving data from a register of class
16184   CLASS1 to one of CLASS2.  */
16185
int
rs6000_register_move_cost (enum machine_mode mode,
			   enum reg_class from, enum reg_class to)
{
  /*  Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      /* Normalize so FROM is the non-GPR class (if any).  */
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      /* FP/AltiVec <-> GPR has no direct path; cost it as a store
	 plus a load through memory.  */
      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

/* It's more expensive to move CR_REGS than CR0_REGS because of the shift....  */
      else if (from == CR_REGS)
	return 4;

      else
/* A move will cost one instruction per GPR moved.  */
	return 2 * HARD_REGNO_NREGS (0, mode);
    }

/* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

/* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
16219
16220/* A C expressions returning the cost of moving data of MODE from a register to
16221   or from memory.  */
16222
16223int
16224rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16225			 int in ATTRIBUTE_UNUSED)
16226{
16227  if (reg_classes_intersect_p (class, GENERAL_REGS))
16228    return 4 * HARD_REGNO_NREGS (0, mode);
16229  else if (reg_classes_intersect_p (class, FLOAT_REGS))
16230    return 4 * HARD_REGNO_NREGS (32, mode);
16231  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16232    return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16233  else
16234    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16235}
16236
16237/* Return an RTX representing where to find the function value of a
16238   function returning MODE.  */
16239static rtx
16240rs6000_complex_function_value (enum machine_mode mode)
16241{
16242  unsigned int regno;
16243  rtx r1, r2;
16244  enum machine_mode inner = GET_MODE_INNER (mode);
16245  unsigned int inner_bytes = GET_MODE_SIZE (inner);
16246
16247  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16248    regno = FP_ARG_RETURN;
16249  else
16250    {
16251      regno = GP_ARG_RETURN;
16252
16253      /* 32-bit is OK since it'll go in r3/r4.  */
16254      if (TARGET_32BIT && inner_bytes >= 4)
16255	return gen_rtx_REG (mode, regno);
16256    }
16257
16258  if (inner_bytes >= 8)
16259    return gen_rtx_REG (mode, regno);
16260
16261  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16262			  const0_rtx);
16263  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16264			  GEN_INT (inner_bytes));
16265  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16266}
16267
16268/* Define how to find the value returned by a function.
16269   VALTYPE is the data type of the value (as a tree).
16270   If the precise function being called is known, FUNC is its FUNCTION_DECL;
16271   otherwise, FUNC is 0.
16272
16273   On the SPE, both FPs and vectors are returned in r3.
16274
16275   On RS/6000 an integer value is in r3 and a floating-point value is in
16276   fp1, unless -msoft-float.  */
16277
16278rtx
16279rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16280{
16281  enum machine_mode mode;
16282  unsigned int regno;
16283
16284  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16285    {
16286      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
16287      return gen_rtx_PARALLEL (DImode,
16288	gen_rtvec (2,
16289		   gen_rtx_EXPR_LIST (VOIDmode,
16290				      gen_rtx_REG (SImode, GP_ARG_RETURN),
16291				      const0_rtx),
16292		   gen_rtx_EXPR_LIST (VOIDmode,
16293				      gen_rtx_REG (SImode,
16294						   GP_ARG_RETURN + 1),
16295				      GEN_INT (4))));
16296    }
16297
16298  if ((INTEGRAL_TYPE_P (valtype)
16299       && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16300      || POINTER_TYPE_P (valtype))
16301    mode = TARGET_32BIT ? SImode : DImode;
16302  else
16303    mode = TYPE_MODE (valtype);
16304
16305  if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
16306    regno = FP_ARG_RETURN;
16307  else if (TREE_CODE (valtype) == COMPLEX_TYPE
16308	   && targetm.calls.split_complex_arg)
16309    return rs6000_complex_function_value (mode);
16310  else if (TREE_CODE (valtype) == VECTOR_TYPE
16311	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16312    regno = ALTIVEC_ARG_RETURN;
16313  else
16314    regno = GP_ARG_RETURN;
16315
16316  return gen_rtx_REG (mode, regno);
16317}
16318
16319/* Define how to find the value returned by a library function
16320   assuming the value has mode MODE.  */
16321rtx
16322rs6000_libcall_value (enum machine_mode mode)
16323{
16324  unsigned int regno;
16325
16326  if (GET_MODE_CLASS (mode) == MODE_FLOAT
16327	   && TARGET_HARD_FLOAT && TARGET_FPRS)
16328    regno = FP_ARG_RETURN;
16329  else if (ALTIVEC_VECTOR_MODE (mode)
16330	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16331    regno = ALTIVEC_ARG_RETURN;
16332  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16333    return rs6000_complex_function_value (mode);
16334  else
16335    regno = GP_ARG_RETURN;
16336
16337  return gen_rtx_REG (mode, regno);
16338}
16339
16340/* Define the offset between two registers, FROM to be eliminated and its
16341   replacement TO, at the start of a routine.  */
16342HOST_WIDE_INT
16343rs6000_initial_elimination_offset (int from, int to)
16344{
16345  rs6000_stack_t *info = rs6000_stack_info ();
16346  HOST_WIDE_INT offset;
16347
16348  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16349    offset = info->push_p ? 0 : -info->total_size;
16350  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16351    offset = info->total_size;
16352  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16353    offset = info->push_p ? info->total_size : 0;
16354  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16355    offset = 0;
16356  else
16357    abort ();
16358
16359  return offset;
16360}
16361
16362/* Return true if TYPE is of type __ev64_opaque__.  */
16363
16364static bool
16365is_ev64_opaque_type (tree type)
16366{
16367  return (TARGET_SPE
16368	  && (type == opaque_V2SI_type_node
16369	      || type == opaque_V2SF_type_node
16370	      || type == opaque_p_V2SI_type_node));
16371}
16372
16373static rtx
16374rs6000_dwarf_register_span (rtx reg)
16375{
16376  unsigned regno;
16377
16378  if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16379    return NULL_RTX;
16380
16381  regno = REGNO (reg);
16382
16383  /* The duality of the SPE register size wreaks all kinds of havoc.
16384     This is a way of distinguishing r0 in 32-bits from r0 in
16385     64-bits.  */
16386  return
16387    gen_rtx_PARALLEL (VOIDmode,
16388		      BYTES_BIG_ENDIAN
16389		      ? gen_rtvec (2,
16390				   gen_rtx_REG (SImode, regno + 1200),
16391				   gen_rtx_REG (SImode, regno))
16392		      : gen_rtvec (2,
16393				   gen_rtx_REG (SImode, regno),
16394				   gen_rtx_REG (SImode, regno + 1200)));
16395}
16396
16397/* Map internal gcc register numbers to DWARF2 register numbers.  */
16398
16399unsigned int
16400rs6000_dbx_register_number (unsigned int regno)
16401{
16402  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16403    return regno;
16404  if (regno == MQ_REGNO)
16405    return 100;
16406  if (regno == LINK_REGISTER_REGNUM)
16407    return 108;
16408  if (regno == COUNT_REGISTER_REGNUM)
16409    return 109;
16410  if (CR_REGNO_P (regno))
16411    return regno - CR0_REGNO + 86;
16412  if (regno == XER_REGNO)
16413    return 101;
16414  if (ALTIVEC_REGNO_P (regno))
16415    return regno - FIRST_ALTIVEC_REGNO + 1124;
16416  if (regno == VRSAVE_REGNO)
16417    return 356;
16418  if (regno == VSCR_REGNO)
16419    return 67;
16420  if (regno == SPE_ACC_REGNO)
16421    return 99;
16422  if (regno == SPEFSCR_REGNO)
16423    return 612;
16424  /* SPE high reg number.  We get these values of regno from
16425     rs6000_dwarf_register_span.  */
16426  if (regno >= 1200 && regno < 1232)
16427    return regno;
16428
16429  abort ();
16430}
16431
16432#include "gt-rs6000.h"
16433