rs6000.c revision 258651
1214082Sdim/* Subroutines used for code generation on IBM RS/6000.
2214082Sdim   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3214082Sdim   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4214082Sdim   Free Software Foundation, Inc.
5214082Sdim   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6214082Sdim
7214082Sdim   This file is part of GCC.
8214082Sdim
9214082Sdim   GCC is free software; you can redistribute it and/or modify it
10214082Sdim   under the terms of the GNU General Public License as published
11214082Sdim   by the Free Software Foundation; either version 2, or (at your
12214082Sdim   option) any later version.
13214082Sdim
14214082Sdim   GCC is distributed in the hope that it will be useful, but WITHOUT
15214082Sdim   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16214082Sdim   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
17214082Sdim   License for more details.
18214082Sdim
19214082Sdim   You should have received a copy of the GNU General Public License
20214082Sdim   along with GCC; see the file COPYING.  If not, write to the
21214082Sdim   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
22214082Sdim   MA 02110-1301, USA.  */
23214082Sdim
24214082Sdim#include "config.h"
25214082Sdim#include "system.h"
26214082Sdim#include "coretypes.h"
27214082Sdim#include "tm.h"
28214082Sdim#include "rtl.h"
29214082Sdim#include "regs.h"
30214082Sdim#include "hard-reg-set.h"
31214082Sdim#include "real.h"
32214082Sdim#include "insn-config.h"
33214082Sdim#include "conditions.h"
34214082Sdim#include "insn-attr.h"
35214082Sdim#include "flags.h"
36214082Sdim#include "recog.h"
37214082Sdim#include "obstack.h"
38214082Sdim#include "tree.h"
39214082Sdim#include "expr.h"
40214082Sdim#include "optabs.h"
41214082Sdim#include "except.h"
42214082Sdim#include "function.h"
43214082Sdim#include "output.h"
44214082Sdim#include "basic-block.h"
45214082Sdim#include "integrate.h"
46214082Sdim#include "toplev.h"
47214082Sdim#include "ggc.h"
48214082Sdim#include "hashtab.h"
49214082Sdim#include "tm_p.h"
50214082Sdim#include "target.h"
51214082Sdim#include "target-def.h"
52214082Sdim#include "langhooks.h"
53214082Sdim#include "reload.h"
54214082Sdim#include "cfglayout.h"
55214082Sdim#include "sched-int.h"
56214082Sdim#include "tree-gimple.h"
57214082Sdim#include "intl.h"
58214082Sdim#include "params.h"
59214082Sdim#include "tm-constrs.h"
60214082Sdim#if TARGET_XCOFF
61214082Sdim#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
62214082Sdim#endif
63214082Sdim#if TARGET_MACHO
64214082Sdim#include "gstab.h"  /* for N_SLINE */
65214082Sdim#endif
66214082Sdim
/* Provide a fallback when the target headers do not define this.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif
70
/* NOTE: function-like macros -- each argument may be evaluated twice,
   so do not pass expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
73
/* Structure used to describe the layout of the rs6000 stack frame.
   The *_offset fields are offsets from the initial stack pointer,
   the *_size fields are sizes in bytes.  */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;		/* size of SPE alignment padding if not in
				   save_size (cf. altivec_padding_size)  */
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;	/* NOTE(review): appears to record whether/how
				   many SPE 64-bit GPRs are in use -- confirm
				   where the frame info is computed  */
} rs6000_stack_t;
113
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  The GTY(()) marker makes the
   structure known to GCC's garbage collector.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;
130
/* Target cpu type */

enum processor_type rs6000_cpu;

/* Switches that can name a cpu.  Per the column header below, the two
   trailing flags indicate whether a given switch selects the cpu to
   tune for and/or the cpu to generate code for.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
141
142/* Always emit branch hint bits.  */
143static GTY(()) bool rs6000_always_hint;
144
145/* Schedule instructions for group formation.  */
146static GTY(()) bool rs6000_sched_groups;
147
148/* Support for -msched-costly-dep option.  */
149const char *rs6000_sched_costly_dep_str;
150enum rs6000_dependence_cost rs6000_sched_costly_dep;
151
152/* Support for -minsert-sched-nops option.  */
153const char *rs6000_sched_insert_nops_str;
154enum rs6000_nop_insertion rs6000_sched_insert_nops;
155
156/* Support targetm.vectorize.builtin_mask_for_load.  */
157static GTY(()) tree altivec_builtin_mask_for_load;
158
159/* Size of long double.  */
160int rs6000_long_double_type_size;
161
162/* IEEE quad extended precision long double. */
163int rs6000_ieeequad;
164
165/* Whether -mabi=altivec has appeared.  */
166int rs6000_altivec_abi;
167
168/* Nonzero if we want SPE ABI extensions.  */
169int rs6000_spe_abi;
170
171/* Nonzero if floating point operations are done in the GPRs.  */
172int rs6000_float_gprs = 0;
173
174/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
175int rs6000_darwin64_abi;
176
177/* Set to nonzero once AIX common-mode calls have been defined.  */
178static GTY(()) int common_mode_defined;
179
180/* Save information from a "cmpxx" operation until the branch or scc is
181   emitted.  */
182rtx rs6000_compare_op0, rs6000_compare_op1;
183int rs6000_compare_fp_p;
184
185/* Label number of label created for -mrelocatable, to call to so we can
186   get the address of the GOT section */
187int rs6000_pic_labelno;
188
189#ifdef USING_ELFOS_H
190/* Which abi to adhere to */
191const char *rs6000_abi_name;
192
193/* Semantics of the small data area */
194enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195
196/* Which small data model to use */
197const char *rs6000_sdata_name = (char *)0;
198
199/* Counter for labels which are to be placed in .fixup.  */
200int fixuplabelno = 0;
201#endif
202
203/* Bit size of immediate TLS offsets and string from which it is decoded.  */
204int rs6000_tls_size = 32;
205const char *rs6000_tls_size_string;
206
207/* ABI enumeration available for subtarget to use.  */
208enum rs6000_abi rs6000_current_abi;
209
210/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
211int dot_symbols;
212
213/* Debug flags */
214const char *rs6000_debug_name;
215int rs6000_debug_stack;		/* debug stack applications */
216int rs6000_debug_arg;		/* debug argument handling */
217
218/* Value is TRUE if register/mode pair is acceptable.  */
219bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
220
221/* Built in types.  */
222
223tree rs6000_builtin_types[RS6000_BTI_MAX];
224tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
225
/* Traceback table support.  rs6000_traceback_name holds the option
   string it was selected from (NOTE(review): parsed elsewhere -- the
   parsing code is not in this part of the file).  The enumerator names
   suggest none / partial / full traceback tables; traceback_default
   means no explicit choice was made.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;
233
234/* Flag to say the TOC is initialized */
235int toc_initialized;
236char toc_label_name[10];
237
238static GTY(()) section *read_only_data_section;
239static GTY(()) section *private_data_section;
240static GTY(()) section *read_only_private_data_section;
241static GTY(()) section *sdata2_section;
242static GTY(()) section *toc_section;
243
244/* Control alignment for fields within structures.  */
245/* String from -malign-XXXXX.  */
246int rs6000_alignment_flags;
247
/* True for any options that were explicitly set.  Lets later option
   processing distinguish an explicit user choice from a default.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool abi;			/* True if -mabi=spe/nospe was used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used. */
  bool long_double;	        /* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;
259
/* Describes one target builtin: the target-flag mask that enables it,
   the insn code used to expand it, its user-visible name, and its
   rs6000 builtin code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;	/* insn code used to expand the builtin */
  const char *const name;	/* user-visible builtin name */
  const enum rs6000_builtins code;	/* rs6000 builtin code */
};
270
/* Target cpu costs.  */

/* Relative costs of various operations for one processor family.
   Entries in the tables below are produced with COSTS_N_INSNS.  */
struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};

/* Cost table in effect for the cpu being compiled for.
   NOTE(review): selected elsewhere from the tables below.  */
const struct processor_costs *rs6000_cost;
287
/* Processor costs (relative to an add).  Each table below instantiates
   struct processor_costs for one processor family.  The size32/size64
   tables measure instruction size rather than speed (every entry is
   one insn), per their header comments.  */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};
574
575
576static bool rs6000_function_ok_for_sibcall (tree, tree);
577static const char *rs6000_invalid_within_doloop (rtx);
578static rtx rs6000_generate_compare (enum rtx_code);
579static void rs6000_maybe_dead (rtx);
580static void rs6000_emit_stack_tie (void);
581static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
582static rtx spe_synthesize_frame_save (rtx);
583static bool spe_func_has_64bit_regs_p (void);
584static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
585			     int, HOST_WIDE_INT);
586static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
587static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
588static unsigned rs6000_hash_constant (rtx);
589static unsigned toc_hash_function (const void *);
590static int toc_hash_eq (const void *, const void *);
591static int constant_pool_expr_1 (rtx, int *, int *);
592static bool constant_pool_expr_p (rtx);
593static bool legitimate_small_data_p (enum machine_mode, rtx);
594static bool legitimate_indexed_address_p (rtx, int);
595static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
596static struct machine_function * rs6000_init_machine_status (void);
597static bool rs6000_assemble_integer (rtx, unsigned int, int);
598static bool no_global_regs_above (int);
599#ifdef HAVE_GAS_HIDDEN
600static void rs6000_assemble_visibility (tree, int);
601#endif
602static int rs6000_ra_ever_killed (void);
603static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
604static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
605static bool rs6000_ms_bitfield_layout_p (tree);
606static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
607static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
608static const char *rs6000_mangle_fundamental_type (tree);
609extern const struct attribute_spec rs6000_attribute_table[];
610static void rs6000_set_default_type_attributes (tree);
611static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
612static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
613static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
614				    tree);
615static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
616static bool rs6000_return_in_memory (tree, tree);
617static void rs6000_file_start (void);
618#if TARGET_ELF
619static int rs6000_elf_reloc_rw_mask (void);
620static void rs6000_elf_asm_out_constructor (rtx, int);
621static void rs6000_elf_asm_out_destructor (rtx, int);
622static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
623static void rs6000_elf_asm_init_sections (void);
624static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
625					       unsigned HOST_WIDE_INT);
626static void rs6000_elf_encode_section_info (tree, rtx, int)
627     ATTRIBUTE_UNUSED;
628#endif
629static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
630#if TARGET_XCOFF
631static void rs6000_xcoff_asm_output_anchor (rtx);
632static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
633static void rs6000_xcoff_asm_init_sections (void);
634static int rs6000_xcoff_reloc_rw_mask (void);
635static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
636static section *rs6000_xcoff_select_section (tree, int,
637					     unsigned HOST_WIDE_INT);
638static void rs6000_xcoff_unique_section (tree, int);
639static section *rs6000_xcoff_select_rtx_section
640  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
641static const char * rs6000_xcoff_strip_name_encoding (const char *);
642static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
643static void rs6000_xcoff_file_start (void);
644static void rs6000_xcoff_file_end (void);
645#endif
646static int rs6000_variable_issue (FILE *, int, rtx, int);
647static bool rs6000_rtx_costs (rtx, int, int, int *);
648static int rs6000_adjust_cost (rtx, rtx, rtx, int);
649static bool is_microcoded_insn (rtx);
650static int is_dispatch_slot_restricted (rtx);
651static bool is_cracked_insn (rtx);
652static bool is_branch_slot_insn (rtx);
653static int rs6000_adjust_priority (rtx, int);
654static int rs6000_issue_rate (void);
655static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
656static rtx get_next_active_insn (rtx, rtx);
657static bool insn_terminates_group_p (rtx , enum group_termination);
658static bool is_costly_group (rtx *, rtx);
659static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
660static int redefine_groups (FILE *, int, rtx, rtx);
661static int pad_groups (FILE *, int, rtx, rtx);
662static void rs6000_sched_finish (FILE *, int);
663static int rs6000_use_sched_lookahead (void);
664static tree rs6000_builtin_mask_for_load (void);
665
666static void def_builtin (int, const char *, tree, int);
667static bool rs6000_vector_alignment_reachable (tree, bool);
668static void rs6000_init_builtins (void);
669static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
670static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
671static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
672static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
673static void altivec_init_builtins (void);
674static void rs6000_common_init_builtins (void);
675static void rs6000_init_libfuncs (void);
676
677static void enable_mask_for_builtins (struct builtin_description *, int,
678				      enum rs6000_builtins,
679				      enum rs6000_builtins);
680static tree build_opaque_vector_type (tree, int);
681static void spe_init_builtins (void);
682static rtx spe_expand_builtin (tree, rtx, bool *);
683static rtx spe_expand_stv_builtin (enum insn_code, tree);
684static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
685static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
686static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
687static rs6000_stack_t *rs6000_stack_info (void);
688static void debug_stack_info (rs6000_stack_t *);
689
690static rtx altivec_expand_builtin (tree, rtx, bool *);
691static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
692static rtx altivec_expand_st_builtin (tree, rtx, bool *);
693static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
694static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
695static rtx altivec_expand_predicate_builtin (enum insn_code,
696					     const char *, tree, rtx);
697static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
698static rtx altivec_expand_stv_builtin (enum insn_code, tree);
699static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
700static rtx altivec_expand_vec_set_builtin (tree);
701static rtx altivec_expand_vec_ext_builtin (tree, rtx);
702static int get_element_number (tree, tree);
703static bool rs6000_handle_option (size_t, const char *, int);
704static void rs6000_parse_tls_size_option (void);
705static void rs6000_parse_yes_no_option (const char *, const char *, int *);
706static int first_altivec_reg_to_save (void);
707static unsigned int compute_vrsave_mask (void);
708static void compute_save_world_info (rs6000_stack_t *info_ptr);
709static void is_altivec_return_reg (rtx, void *);
710static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
711int easy_vector_constant (rtx, enum machine_mode);
712static bool rs6000_is_opaque_type (tree);
713static rtx rs6000_dwarf_register_span (rtx);
714static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
715static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
716static rtx rs6000_tls_get_addr (void);
717static rtx rs6000_got_sym (void);
718static int rs6000_tls_symbol_ref_1 (rtx *, void *);
719static const char *rs6000_get_some_local_dynamic_name (void);
720static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
721static rtx rs6000_complex_function_value (enum machine_mode);
722static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
723				    enum machine_mode, tree);
724static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
725						      HOST_WIDE_INT);
726static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
727							tree, HOST_WIDE_INT);
728static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
729					      HOST_WIDE_INT,
730					      rtx[], int *);
731static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
732					       tree, HOST_WIDE_INT,
733					       rtx[], int *);
734static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
735static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
736static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
737static void setup_incoming_varargs (CUMULATIVE_ARGS *,
738				    enum machine_mode, tree,
739				    int *, int);
740static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
741				      tree, bool);
742static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
743				     tree, bool);
744static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
745#if TARGET_MACHO
746static void macho_branch_islands (void);
747static int no_previous_def (tree function_name);
748static tree get_prev_label (tree function_name);
749static void rs6000_darwin_file_start (void);
750#endif
751
752static tree rs6000_build_builtin_va_list (void);
753static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
754static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
755static bool rs6000_scalar_mode_supported_p (enum machine_mode);
756static bool rs6000_vector_mode_supported_p (enum machine_mode);
757static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
758			     enum machine_mode);
759static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
760				       enum machine_mode);
761static int get_vsel_insn (enum machine_mode);
762static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
763static tree rs6000_stack_protect_fail (void);
764
765const int INSN_NOT_AVAILABLE = -1;
766static enum machine_mode rs6000_eh_return_filter_mode (void);
767
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;	/* machine mode of the constant `key' */
  int labelno;			/* number of the label for this TOC entry */
};

/* GC-rooted hash table of toc_hash_struct entries.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
780
/* Default register names.  Row layout matches alt_reg_names below:
   GPRs, FPRs, special registers, CR fields, XER, then AltiVec/SPE.  */
char rs6000_reg_names[][8] =
{
      /* GPRs r0-r31 (printed as bare numbers).  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* FPRs f0-f31 (printed as bare numbers).  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      /* Special registers: MQ, link register, count register,
	 argument pointer.  */
     "mq", "lr", "ctr","ap",
      /* Condition register fields cr0-cr7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};
806
#ifdef TARGET_REGNAMES
/* Alternate (prefixed) register names, copied over rs6000_reg_names
   in rs6000_override_options when -mregnames is in effect.  Must have
   the same layout as rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
833
/* Fallback definitions so this file compiles on subtargets that do
   not provide these macros.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides one default hook in TARGET_INITIALIZER; the combined
   result is materialized in `targetm' at the end of this block.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

/* Scheduler hooks.  */
#undef  TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

/* Vectorizer hooks.  */
#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order. The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus. Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

/* The concrete target hook vector, built from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
1051
1052
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  Note the checks are ordered: a GPR number is claimed by the
   first test, so the SPE test further down only matters for register
   numbers outside the other classes.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This also excludes decimal float modes.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && !DECIMAL_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  /* XER is accessed only as a PSImode value.  */
  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes fit only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
1093
1094/* Initialize rs6000_hard_regno_mode_ok_p table.  */
1095static void
1096rs6000_init_hard_regno_mode_ok (void)
1097{
1098  int r, m;
1099
1100  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1101    for (m = 0; m < NUM_MACHINE_MODES; ++m)
1102      if (rs6000_hard_regno_mode_ok (r, m))
1103	rs6000_hard_regno_mode_ok_p[m][r] = true;
1104}
1105
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   This processes, in order: the -mcpu/-mtune selection against
   processor_target_table, endianness-dependent multiple/string
   adjustments, -mdebug/-mtraceback string options, ABI defaults
   (AltiVec, Darwin64, SPE), scheduling-related string options,
   alignment defaults, and finally the per-processor cost table.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"403", PROCESSOR_PPC403,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
	 {"405", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"405fp", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"440", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"440fp", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
	 {"601", PROCESSOR_PPC601,
	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"620", PROCESSOR_PPC620,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"630", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
	 /* 8548 has a dummy entry for now.  */
	 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"970", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"G5", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"power2", PROCESSOR_POWER,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"power3", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"power4", PROCESSOR_POWER4,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power5", PROCESSOR_POWER5,
	  POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	  | MASK_MFCRF | MASK_POPCNTB},
	 {"power5+", PROCESSOR_POWER5,
	  POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	  | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
 	 {"power6", PROCESSOR_POWER5,
	  POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
	  | MASK_FPRND},
	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
	 {"powerpc64", PROCESSOR_POWERPC64,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios2", PROCESSOR_RIOS2,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rs64", PROCESSOR_RS64A,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
		     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
		     | MASK_DLMZB)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags &= ~set_masks;
		    target_flags |= (processor_target_table[j].target_enable
				     & set_masks);
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning (0, "-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning (0, "-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* -mtraceback only compares a prefix, so e.g. "partial" matches "part".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
	       rs6000_traceback_name);
    }

  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

#ifndef POWERPC_LINUX
  if (!rs6000_explicit_options.ieee)
    rs6000_ieeequad = 1;
#endif

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }

  /* Place FP constants in the constant pool instead of TOC
     if section anchors enabled.  */
  if (flag_section_anchors)
    TARGET_NO_FP_IN_TOC = 1;

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
	error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
	 MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
	target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
	 default, so let's unset them if we manually set another
	 CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
	rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
	rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
	rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
	rs6000_isel = 0;
      if (!rs6000_explicit_options.long_double)
	rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  /* Scheduling behavior is keyed off whether we tune for the
     dispatch-group processors (POWER4/POWER5).  */
  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
			&& rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
			 || rs6000_cpu == PROCESSOR_POWER5);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
	rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
	rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
	rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
	rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
	/* Any other string is treated as a numeric latency threshold.  */
	rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
	rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
	rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
	rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
	rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      if (rs6000_sched_groups)
	{
	  if (align_functions <= 0)
	    align_functions = 16;
	  if (align_jumps <= 0)
	    align_jumps = 16;
	  if (align_loops <= 0)
	    align_loops = 16;
	}
      if (align_jumps_max_skip <= 0)
	align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
	align_loops_max_skip = 15;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;

  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
	rs6000_cost = &rios1_cost;
	break;

      case PROCESSOR_RIOS2:
	rs6000_cost = &rios2_cost;
	break;

      case PROCESSOR_RS64A:
	rs6000_cost = &rs64a_cost;
	break;

      case PROCESSOR_MPCCORE:
	rs6000_cost = &mpccore_cost;
	break;

      case PROCESSOR_PPC403:
	rs6000_cost = &ppc403_cost;
	break;

      case PROCESSOR_PPC405:
	rs6000_cost = &ppc405_cost;
	break;

      case PROCESSOR_PPC440:
	rs6000_cost = &ppc440_cost;
	break;

      case PROCESSOR_PPC601:
	rs6000_cost = &ppc601_cost;
	break;

      case PROCESSOR_PPC603:
	rs6000_cost = &ppc603_cost;
	break;

      case PROCESSOR_PPC604:
	rs6000_cost = &ppc604_cost;
	break;

      case PROCESSOR_PPC604e:
	rs6000_cost = &ppc604e_cost;
	break;

      case PROCESSOR_PPC620:
	rs6000_cost = &ppc620_cost;
	break;

      case PROCESSOR_PPC630:
	rs6000_cost = &ppc630_cost;
	break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
	rs6000_cost = &ppc750_cost;
	break;

      case PROCESSOR_PPC7450:
	rs6000_cost = &ppc7450_cost;
	break;

      case PROCESSOR_PPC8540:
	rs6000_cost = &ppc8540_cost;
	break;

      case PROCESSOR_POWER4:
      case PROCESSOR_POWER5:
	rs6000_cost = &power4_cost;
	break;

      default:
	gcc_unreachable ();
      }
}
1578
1579/* Implement targetm.vectorize.builtin_mask_for_load.  */
1580static tree
1581rs6000_builtin_mask_for_load (void)
1582{
1583  if (TARGET_ALTIVEC)
1584    return altivec_builtin_mask_for_load;
1585  else
1586    return 0;
1587}
1588
1589
1590/* Return true iff, data reference of TYPE can reach vector alignment (16)
1591   after applying N number of iterations.  This routine does not determine
1592   how may iterations are required to reach desired alignment.  */
1593
1594static bool
1595rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
1596{
1597  if (is_packed)
1598    return false;
1599
1600  if (TARGET_32BIT)
1601    {
1602      if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1603        return true;
1604
1605      if (rs6000_alignment_flags ==  MASK_ALIGN_POWER)
1606        return true;
1607
1608      return false;
1609    }
1610  else
1611    {
1612      if (TARGET_MACHO)
1613        return false;
1614
1615      /* Assuming that all other types are naturally aligned. CHECKME!  */
1616      return true;
1617    }
1618}
1619
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value (may be null, in which case nothing happens).
   FLAG points to the int that receives 1 for 'yes' or 0 for 'no'; any
   other value is diagnosed with error().  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1637
1638/* Validate and record the size specified with the -mtls-size option.  */
1639
1640static void
1641rs6000_parse_tls_size_option (void)
1642{
1643  if (rs6000_tls_size_string == 0)
1644    return;
1645  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1646    rs6000_tls_size = 16;
1647  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1648    rs6000_tls_size = 32;
1649  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1650    rs6000_tls_size = 64;
1651  else
1652    error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1653}
1654
/* Set target-dependent defaults that depend on the optimization level.
   LEVEL (-O level) and SIZE (-Os) are currently unused on this target.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_DARWIN)
    /* The Darwin libraries never set errno, so we might as well
       avoid calling them when that's the only reason we would.  */
    flag_errno_math = 0;

  /* Double growth factor to counter reduced min jump length.  */
  set_param_value ("max-grow-copy-bb-insns", 16);

  /* Enable section anchors by default.
     Skip section anchors for Objective C and Objective C++
     until front-ends fixed.  */
  /* NOTE(review): name[4] appears to index past a "GNU " prefix, so 'O'
     matches "GNU Objective-C"/"GNU Objective-C++" — confirm against the
     front ends' lang_hooks.name strings.  */
  if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
    flag_section_anchors = 1;
}
1672
1673/* Implement TARGET_HANDLE_OPTION.  */
1674
/* Process one target-specific command-line option.  CODE identifies the
   option, ARG is its string argument (NULL if none) and VALUE its integer
   value (0 when the switch is negated).  Returns false for an invalid
   option, true otherwise.  */
static bool
rs6000_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mno_power:
      /* Turning off POWER also turns off the POWER-only multiple/string
	 instructions, and records all four bits as explicitly set.  */
      target_flags &= ~(MASK_POWER | MASK_POWER2
			| MASK_MULTIPLE | MASK_STRING);
      target_flags_explicit |= (MASK_POWER | MASK_POWER2
				| MASK_MULTIPLE | MASK_STRING);
      break;
    case OPT_mno_powerpc:
      /* Likewise, -mno-powerpc clears every PowerPC-dependent feature bit.  */
      target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
			| MASK_PPC_GFXOPT | MASK_POWERPC64);
      target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
				| MASK_PPC_GFXOPT | MASK_POWERPC64);
      break;
    case OPT_mfull_toc:
      target_flags &= ~MASK_MINIMAL_TOC;
      TARGET_NO_FP_IN_TOC = 0;
      TARGET_NO_SUM_IN_TOC = 0;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#ifdef TARGET_USES_SYSV4_OPT
      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
	 just the same as -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#endif
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mtoc:
      /* Make -mtoc behave like -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
      break;
#endif

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix64:
#else
    case OPT_m64:
#endif
      /* 64-bit implies PowerPC; also turn on gfxopt unless the user set
	 that bit explicitly themselves.  */
      target_flags |= MASK_POWERPC64 | MASK_POWERPC;
      target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
      target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
      break;

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix32:
#else
    case OPT_m32:
#endif
      target_flags &= ~MASK_POWERPC64;
      target_flags_explicit |= MASK_POWERPC64;
      break;

    case OPT_minsert_sched_nops_:
      /* Validated later; just record the string here.  */
      rs6000_sched_insert_nops_str = arg;
      break;

    case OPT_mminimal_toc:
      if (value == 1)
	{
	  TARGET_NO_FP_IN_TOC = 0;
	  TARGET_NO_SUM_IN_TOC = 0;
	}
      break;

    case OPT_mpower:
      if (value == 1)
	{
	  /* POWER brings the multiple/string instructions with it.  */
	  target_flags |= (MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpower2:
      if (value == 1)
	{
	  /* POWER2 implies POWER and its dependent instructions.  */
	  target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpowerpc_gpopt:
    case OPT_mpowerpc_gfxopt:
      if (value == 1)
	{
	  /* The optional instruction groups require the PowerPC base.  */
	  target_flags |= MASK_POWERPC;
	  target_flags_explicit |= MASK_POWERPC;
	}
      break;

    case OPT_maix_struct_return:
    case OPT_msvr4_struct_return:
      rs6000_explicit_options.aix_struct_ret = true;
      break;

    case OPT_mvrsave_:
      rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
      break;

    case OPT_misel_:
      rs6000_explicit_options.isel = true;
      rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
      break;

    case OPT_mspe_:
      rs6000_explicit_options.spe = true;
      rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
      /* No SPE means 64-bit long doubles, even if an E500.  */
      if (!rs6000_spe)
	rs6000_long_double_type_size = 64;
      break;

    case OPT_mdebug_:
      rs6000_debug_name = arg;
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mcall_:
      rs6000_abi_name = arg;
      break;

    case OPT_msdata_:
      rs6000_sdata_name = arg;
      break;

    case OPT_mtls_size_:
      /* Validated later by rs6000_parse_tls_size_option.  */
      rs6000_tls_size_string = arg;
      break;

    case OPT_mrelocatable:
      if (value == 1)
	{
	  /* Relocatable code needs a minimal TOC with no FP constants.  */
	  target_flags |= MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      break;

    case OPT_mrelocatable_lib:
      if (value == 1)
	{
	  target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      else
	{
	  target_flags &= ~MASK_RELOCATABLE;
	  target_flags_explicit |= MASK_RELOCATABLE;
	}
      break;
#endif

    case OPT_mabi_:
      if (!strcmp (arg, "altivec"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_altivec_abi = 1;
	  rs6000_spe_abi = 0;
	}
      else if (! strcmp (arg, "no-altivec"))
	{
	  /* ??? Don't set rs6000_explicit_options.abi here, to allow
	     the default for rs6000_spe_abi to be chosen later.  */
	  rs6000_altivec_abi = 0;
	}
      else if (! strcmp (arg, "spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 1;
	  rs6000_altivec_abi = 0;
	  if (!TARGET_SPE_ABI)
	    error ("not configured for ABI: '%s'", arg);
	}
      else if (! strcmp (arg, "no-spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 0;
	}

      /* These are here for testing during development only, do not
	 document in the manual please.  */
      else if (! strcmp (arg, "d64"))
	{
	  rs6000_darwin64_abi = 1;
	  warning (0, "Using darwin64 ABI");
	}
      else if (! strcmp (arg, "d32"))
	{
	  rs6000_darwin64_abi = 0;
	  warning (0, "Using old darwin ABI");
	}

      else if (! strcmp (arg, "ibmlongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 0;
	  warning (0, "Using IBM extended precision long double");
	}
      else if (! strcmp (arg, "ieeelongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 1;
	  warning (0, "Using IEEE extended precision long double");
	}

      else
	{
	  error ("unknown ABI specified: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mcpu_:
      rs6000_select[1].string = arg;
      break;

    case OPT_mtune_:
      rs6000_select[2].string = arg;
      break;

    case OPT_mtraceback_:
      rs6000_traceback_name = arg;
      break;

    case OPT_mfloat_gprs_:
      rs6000_explicit_options.float_gprs = true;
      if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
	rs6000_float_gprs = 1;
      else if (! strcmp (arg, "double"))
	rs6000_float_gprs = 2;
      else if (! strcmp (arg, "no"))
	rs6000_float_gprs = 0;
      else
	{
	  error ("invalid option for -mfloat-gprs: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mlong_double_:
      rs6000_explicit_options.long_double = true;
      /* Start from the default, then override below when VALUE is valid.  */
      rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
      if (value != 64 && value != 128)
	{
	  error ("Unknown switch -mlong-double-%s", arg);
	  rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
	  return false;
	}
      else
	rs6000_long_double_type_size = value;
      break;

    case OPT_msched_costly_dep_:
      rs6000_sched_costly_dep_str = arg;
      break;

    case OPT_malign_:
      rs6000_explicit_options.alignment = true;
      if (! strcmp (arg, "power"))
	{
	  /* On 64-bit Darwin, power alignment is ABI-incompatible with
	     some C library functions, so warn about it. The flag may be
	     useful for performance studies from time to time though, so
	     don't disable it entirely.  */
	  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
	    warning (0, "-malign-power is not supported for 64-bit Darwin;"
		     " it is incompatible with the installed C and C++ libraries");
	  rs6000_alignment_flags = MASK_ALIGN_POWER;
	}
      else if (! strcmp (arg, "natural"))
	rs6000_alignment_flags = MASK_ALIGN_NATURAL;
      else
	{
	  error ("unknown -malign-XXXXX option specified: '%s'", arg);
	  return false;
	}
      break;
    }
  return true;
}
1960
1961/* Do anything needed at the start of the asm file.  */
1962
/* Emit the start-of-file boilerplate for the assembler output, and, with
   -fverbose-asm, a comment line describing the rs6000/powerpc options in
   effect.  On AIX-ABI or PIC ELF targets, also pre-create the TOC and
   text sections.  */
static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  /* START points at the banner text until something has been printed;
     afterwards it is reset to "" so the banner appears at most once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* When compiling for the non-default word size, the configured default
     CPU name does not apply; suppress it.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

      if (PPC405_ERRATUM77)
	{
	  fprintf (file, "%s PPC405CR_ERRATUM77", start);
	  start = "";
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* START was reset iff something was printed; terminate the line.  */
      if (*start == '\0')
	putc ('\n', file);
    }

  if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
    {
      switch_to_section (toc_section);
      switch_to_section (text_section);
    }
}
2028
2029
2030/* Return nonzero if this function is known to have a null epilogue.  */
2031
2032int
2033direct_return (void)
2034{
2035  if (reload_completed)
2036    {
2037      rs6000_stack_t *info = rs6000_stack_info ();
2038
2039      if (info->first_gp_reg_save == 32
2040	  && info->first_fp_reg_save == 64
2041	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
2042	  && ! info->lr_save_p
2043	  && ! info->cr_save_p
2044	  && info->vrsave_mask == 0
2045	  && ! info->push_p)
2046	return 1;
2047    }
2048
2049  return 0;
2050}
2051
2052/* Return the number of instructions it takes to form a constant in an
2053   integer register.  */
2054
2055int
2056num_insns_constant_wide (HOST_WIDE_INT value)
2057{
2058  /* signed constant loadable with {cal|addi} */
2059  if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
2060    return 1;
2061
2062  /* constant loadable with {cau|addis} */
2063  else if ((value & 0xffff) == 0
2064	   && (value >> 31 == -1 || value >> 31 == 0))
2065    return 1;
2066
2067#if HOST_BITS_PER_WIDE_INT == 64
2068  else if (TARGET_POWERPC64)
2069    {
2070      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2071      HOST_WIDE_INT high = value >> 31;
2072
2073      if (high == 0 || high == -1)
2074	return 2;
2075
2076      high >>= 1;
2077
2078      if (low == 0)
2079	return num_insns_constant_wide (high) + 1;
2080      else
2081	return (num_insns_constant_wide (high)
2082		+ num_insns_constant_wide (low) + 1);
2083    }
2084#endif
2085
2086  else
2087    return 2;
2088}
2089
/* Return the number of instructions needed to load constant OP of mode
   MODE into an integer register.  OP must be a CONST_INT or CONST_DOUBLE.  */
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT low, high;

  switch (GET_CODE (op))
    {
    case CONST_INT:
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit mask constant can always be done with rldic[lr]: two
	 instructions, which may beat the generic splitting below.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));

      case CONST_DOUBLE:
	if (mode == SFmode)
	  {
	    /* A single-precision float is loaded via its 32-bit image.  */
	    long l;
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_SINGLE (rv, l);
	    return num_insns_constant_wide ((HOST_WIDE_INT) l);
	  }

	if (mode == VOIDmode || mode == DImode)
	  {
	    /* Integer CONST_DOUBLE: the two words are stored directly.  */
	    high = CONST_DOUBLE_HIGH (op);
	    low  = CONST_DOUBLE_LOW (op);
	  }
	else
	  {
	    /* Double-precision float: split into its two 32-bit words,
	       ordered by target endianness.  */
	    long l[2];
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	    high = l[WORDS_BIG_ENDIAN == 0];
	    low  = l[WORDS_BIG_ENDIAN != 0];
	  }

	if (TARGET_32BIT)
	  /* Two 32-bit registers: cost each half independently.  */
	  return (num_insns_constant_wide (low)
		  + num_insns_constant_wide (high));
	else
	  {
	    /* One 64-bit register: pick the cheapest synthesis.  */
	    if ((high == 0 && low >= 0)
		|| (high == -1 && low < 0))
	      return num_insns_constant_wide (low);

	    else if (mask64_operand (op, mode))
	      return 2;

	    else if (low == 0)
	      return num_insns_constant_wide (high) + 1;

	    else
	      return (num_insns_constant_wide (high)
		      + num_insns_constant_wide (low) + 1);
	  }

    default:
      gcc_unreachable ();
    }
}
2157
2158/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2159   If the mode of OP is MODE_VECTOR_INT, this simply returns the
2160   corresponding element of the vector, but for V4SFmode and V2SFmode,
2161   the corresponding "float" is interpreted as an SImode integer.  */
2162
2163static HOST_WIDE_INT
2164const_vector_elt_as_int (rtx op, unsigned int elt)
2165{
2166  rtx tmp = CONST_VECTOR_ELT (op, elt);
2167  if (GET_MODE (op) == V4SFmode
2168      || GET_MODE (op) == V2SFmode)
2169    tmp = gen_lowpart (SImode, tmp);
2170  return INTVAL (tmp);
2171}
2172
2173/* Return true if OP can be synthesized with a particular vspltisb, vspltish
2174   or vspltisw instruction.  OP is a CONST_VECTOR.  Which instruction is used
2175   depends on STEP and COPIES, one of which will be 1.  If COPIES > 1,
2176   all items are set to the same value and contain COPIES replicas of the
2177   vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2178   operand and the others are set to the value of the operand's msb.  */
2179
/* Return true if CONST_VECTOR OP can be synthesized with a vspltisb,
   vspltish or vspltisw whose element width is STEP * COPIES times
   narrower or wider than OP's own element width (one of STEP, COPIES
   is 1).  See the comment above for the exact STEP/COPIES semantics.  */
static bool
vspltis_constant (rtx op, unsigned step, unsigned copies)
{
  enum machine_mode mode = GET_MODE (op);
  enum machine_mode inner = GET_MODE_INNER (mode);

  unsigned i;
  unsigned nunits = GET_MODE_NUNITS (mode);
  unsigned bitsize = GET_MODE_BITSIZE (inner);
  unsigned mask = GET_MODE_MASK (inner);

  /* VAL is the last element; on a big-endian splat this is the one that
     must hold the literal operand.  */
  HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
  HOST_WIDE_INT splat_val = val;
  /* The other elements must be filled with VAL's most significant bit.  */
  HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;

  /* Construct the value to be splatted, if possible.  If not, return 0.  */
  for (i = 2; i <= copies; i *= 2)
    {
      HOST_WIDE_INT small_val;
      bitsize /= 2;
      small_val = splat_val >> bitsize;
      mask >>= bitsize;
      /* The element must be COPIES identical half-width replicas.  */
      if (splat_val != ((small_val << bitsize) | (small_val & mask)))
	return false;
      splat_val = small_val;
    }

  /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw].  */
  if (EASY_VECTOR_15 (splat_val))
    ;

  /* Also check if we can splat, and then add the result to itself.  Do so if
     the value is positive, of if the splat instruction is using OP's mode;
     for splat_val < 0, the splat and the add should use the same mode.  */
  else if (EASY_VECTOR_15_ADD_SELF (splat_val)
           && (splat_val >= 0 || (step == 1 && copies == 1)))
    ;

  else
    return false;

  /* Check if VAL is present in every STEP-th element, and the
     other elements are filled with its most significant bit.  */
  for (i = 0; i < nunits - 1; ++i)
    {
      HOST_WIDE_INT desired_val;
      /* Element I must carry VAL when (I+1) is a multiple of STEP
	 (STEP is a power of two, so the AND implements the modulus).  */
      if (((i + 1) & (step - 1)) == 0)
	desired_val = val;
      else
	desired_val = msb_val;

      if (desired_val != const_vector_elt_as_int (op, i))
	return false;
    }

  return true;
}
2237
2238
2239/* Return true if OP is of the given MODE and can be synthesized
2240   with a vspltisb, vspltish or vspltisw.  */
2241
2242bool
2243easy_altivec_constant (rtx op, enum machine_mode mode)
2244{
2245  unsigned step, copies;
2246
2247  if (mode == VOIDmode)
2248    mode = GET_MODE (op);
2249  else if (mode != GET_MODE (op))
2250    return false;
2251
2252  /* Start with a vspltisw.  */
2253  step = GET_MODE_NUNITS (mode) / 4;
2254  copies = 1;
2255
2256  if (vspltis_constant (op, step, copies))
2257    return true;
2258
2259  /* Then try with a vspltish.  */
2260  if (step == 1)
2261    copies <<= 1;
2262  else
2263    step >>= 1;
2264
2265  if (vspltis_constant (op, step, copies))
2266    return true;
2267
2268  /* And finally a vspltisb.  */
2269  if (step == 1)
2270    copies <<= 1;
2271  else
2272    step >>= 1;
2273
2274  if (vspltis_constant (op, step, copies))
2275    return true;
2276
2277  return false;
2278}
2279
2280/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2281   result is OP.  Abort if it is not possible.  */
2282
2283rtx
2284gen_easy_altivec_constant (rtx op)
2285{
2286  enum machine_mode mode = GET_MODE (op);
2287  int nunits = GET_MODE_NUNITS (mode);
2288  rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2289  unsigned step = nunits / 4;
2290  unsigned copies = 1;
2291
2292  /* Start with a vspltisw.  */
2293  if (vspltis_constant (op, step, copies))
2294    return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2295
2296  /* Then try with a vspltish.  */
2297  if (step == 1)
2298    copies <<= 1;
2299  else
2300    step >>= 1;
2301
2302  if (vspltis_constant (op, step, copies))
2303    return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2304
2305  /* And finally a vspltisb.  */
2306  if (step == 1)
2307    copies <<= 1;
2308  else
2309    step >>= 1;
2310
2311  if (vspltis_constant (op, step, copies))
2312    return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2313
2314  gcc_unreachable ();
2315}
2316
/* Return the assembler template for moving the easy vector constant
   OPERANDS[1] into vector register OPERANDS[0].  May rewrite OPERANDS[1]
   (and, for SPE, set OPERANDS[2]) to the scalar splat operand(s).  */
const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      rtx splat_vec;
      /* All-zero constants are a plain register clear.  */
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";

      splat_vec = gen_easy_altivec_constant (vec);
      gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
      operands[1] = XEXP (splat_vec, 0);
      /* Values needing the splat-and-add trick are left to a splitter.  */
      if (!EASY_VECTOR_15 (INTVAL (operands[1])))
	return "#";

      /* Pick the splat instruction matching the duplicate's mode.  */
      switch (GET_MODE (splat_vec))
	{
	case V4SImode:
	  return "vspltisw %0,%1";

	case V8HImode:
	  return "vspltish %0,%1";

	case V16QImode:
	  return "vspltisb %0,%1";

	default:
	  gcc_unreachable ();
	}
    }

  gcc_assert (TARGET_SPE);

  /* Vector constant 0 is handled as a splitter of V2SI, and in the
     pattern of V1DI, V4HI, and V2SF.

     FIXME: We should probably return # and add post reload
     splitters for these, but this way is so easy ;-).  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  operands[1] = CONST_VECTOR_ELT (vec, 0);
  operands[2] = CONST_VECTOR_ELT (vec, 1);
  if (cst == cst2)
    return "li %0,%1\n\tevmergelo %0,%0,%0";
  else
    return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
}
2372
2373/* Initialize vector TARGET to VALS.  */
2374
/* Initialize vector TARGET to VALS.  */
void
rs6000_expand_vector_init (rtx target, rtx vals)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  int n_elts = GET_MODE_NUNITS (mode);
  int n_var = 0, one_var = -1;
  bool all_same = true, all_const_zero = true;
  rtx x, mem;
  int i;

  /* Classify the initializer: count non-constant elements (remembering
     the last one), and note whether all elements are zero / identical.  */
  for (i = 0; i < n_elts; ++i)
    {
      x = XVECEXP (vals, 0, i);
      if (!CONSTANT_P (x))
	++n_var, one_var = i;
      else if (x != CONST0_RTX (inner_mode))
	all_const_zero = false;

      if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
	all_same = false;
    }

  if (n_var == 0)
    {
      if (mode != V4SFmode && all_const_zero)
	{
	  /* Zero register.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target,
				  gen_rtx_XOR (mode, target, target)));
	  return;
	}
      else if (mode != V4SFmode && easy_vector_constant (vals, mode))
	{
	  /* Splat immediate.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target, vals));
	  return;
	}
      else if (all_same)
	;	/* Splat vector element.  */
      else
	{
	  /* Load from constant pool.  */
	  emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
	  return;
	}
    }

  /* Store value to stack temp.  Load vector element.  Splat.  */
  if (all_same)
    {
      mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
      emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
		      XVECEXP (vals, 0, 0));
      /* The UNSPEC_LVE marks this load as an element load (lve*x).  */
      x = gen_rtx_UNSPEC (VOIDmode,
			  gen_rtvec (1, const0_rtx), UNSPEC_LVE);
      emit_insn (gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec (2,
					      gen_rtx_SET (VOIDmode,
							   target, mem),
					      x)));
      /* Select element 0 and duplicate it across the whole vector.  */
      x = gen_rtx_VEC_SELECT (inner_mode, target,
			      gen_rtx_PARALLEL (VOIDmode,
						gen_rtvec (1, const0_rtx)));
      emit_insn (gen_rtx_SET (VOIDmode, target,
			      gen_rtx_VEC_DUPLICATE (mode, x)));
      return;
    }

  /* One field is non-constant.  Load constant then overwrite
     varying field.  */
  if (n_var == 1)
    {
      rtx copy = copy_rtx (vals);

      /* Load constant part of vector, substitute neighboring value for
	 varying element.  */
      XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
      rs6000_expand_vector_init (target, copy);

      /* Insert variable.  */
      rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
      return;
    }

  /* Construct the vector in memory one field at a time
     and load the whole vector.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
  for (i = 0; i < n_elts; i++)
    emit_move_insn (adjust_address_nv (mem, inner_mode,
				    i * GET_MODE_SIZE (inner_mode)),
		    XVECEXP (vals, 0, i));
  emit_move_insn (target, mem);
}
2469
2470/* Set field ELT of TARGET to VAL.  */
2471
/* Set field ELT of vector register TARGET to scalar VAL, leaving the
   other elements unchanged.  Implemented as: load VAL into a scratch
   vector via the stack, then vperm it into place.  */
void
rs6000_expand_vector_set (rtx target, rtx val, int elt)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx reg = gen_reg_rtx (mode);
  rtx mask, mem, x;
  int width = GET_MODE_SIZE (inner_mode);
  int i;

  /* Load single variable value.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
  emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
  /* The UNSPEC_LVE marks this load as an element load (lve*x).  */
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_LVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       reg, mem),
					  x)));

  /* Linear sequence.  */
  /* Start from the identity permutation 0..15 (select all of TARGET).  */
  mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
  for (i = 0; i < 16; ++i)
    XVECEXP (mask, 0, i) = GEN_INT (i);

  /* Set permute mask to insert element into target.  */
  /* Indices 0x10..0x1f select bytes from the second vperm input (REG),
     so the WIDTH bytes of element ELT come from the loaded value.  */
  for (i = 0; i < width; ++i)
    XVECEXP (mask, 0, elt*width + i)
      = GEN_INT (i + 0x10);
  x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
  x = gen_rtx_UNSPEC (mode,
		      gen_rtvec (3, target, reg,
				 force_reg (V16QImode, x)),
		      UNSPEC_VPERM);
  emit_insn (gen_rtx_SET (VOIDmode, target, x));
}
2509
2510/* Extract field ELT from VEC into TARGET.  */
2511
/* Extract field ELT from vector VEC into scalar register TARGET, by
   storing VEC to a stack buffer and reloading the selected element.  */
void
rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
{
  enum machine_mode mode = GET_MODE (vec);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx mem, x;

  /* Allocate mode-sized buffer.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);

  /* Add offset to field within buffer matching vector element.  */
  mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));

  /* Store single field into mode-sized buffer.  */
  /* The UNSPEC_STVE marks this as an element store (stve*x).  */
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_STVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       mem, vec),
					  x)));
  emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
}
2535
2536/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2537   implement ANDing by the mask IN.  */
/* Split the 64-bit AND mask IN (a CONST_INT with more than one run of
   ones) into operands for a pair of rotate-and-mask instructions:
   OUT[0]/OUT[2] are the two rotate counts and OUT[1]/OUT[3] the two
   masks, i.e. x & IN == rotate(rotate(x, OUT[0]) & OUT[1], OUT[2]) & OUT[3].
   Only usable when the host supports 64-bit arithmetic.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  gcc_assert (GET_CODE (in) == CONST_INT);

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* Unreachable on 32-bit hosts; silence unused-parameter warnings.  */
  (void)in;
  (void)out;
  gcc_unreachable ();
#endif
}
2604
2605/* Return TRUE if OP is an invalid SUBREG operation on the e500.  */
2606
2607bool
2608invalid_e500_subreg (rtx op, enum machine_mode mode)
2609{
2610  if (TARGET_E500_DOUBLE)
2611    {
2612      /* Reject (subreg:SI (reg:DF)).  */
2613      if (GET_CODE (op) == SUBREG
2614	  && mode == SImode
2615	  && REG_P (SUBREG_REG (op))
2616	  && GET_MODE (SUBREG_REG (op)) == DFmode)
2617	return true;
2618
2619      /* Reject (subreg:DF (reg:DI)).  */
2620      if (GET_CODE (op) == SUBREG
2621	  && mode == DFmode
2622	  && REG_P (SUBREG_REG (op))
2623	  && GET_MODE (SUBREG_REG (op)) == DImode)
2624	return true;
2625    }
2626
2627  if (TARGET_SPE
2628      && GET_CODE (op) == SUBREG
2629      && mode == SImode
2630      && REG_P (SUBREG_REG (op))
2631      && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
2632    return true;
2633
2634  return false;
2635}
2636
2637/* Darwin, AIX increases natural record alignment to doubleword if the first
2638   field is an FP double while the FP fields remain word aligned.  */
2639
2640unsigned int
2641rs6000_special_round_type_align (tree type, unsigned int computed,
2642				 unsigned int specified)
2643{
2644  unsigned int align = MAX (computed, specified);
2645  tree field = TYPE_FIELDS (type);
2646
2647  /* Skip all non field decls */
2648  while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2649    field = TREE_CHAIN (field);
2650
2651  if (field != NULL && field != type)
2652    {
2653      type = TREE_TYPE (field);
2654      while (TREE_CODE (type) == ARRAY_TYPE)
2655	type = TREE_TYPE (type);
2656
2657      if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2658	align = MAX (align, 64);
2659    }
2660
2661  return align;
2662}
2663
2664/* Return 1 for an operand in small memory on V.4/eabi.  */
2665
/* Return 1 if OP is a (possibly offset) symbol in the V.4/eabi small
   data area, i.e. addressable relative to _SDA_BASE_.  Always 0 on
   non-ELF configurations.  */
int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small data section, or it is not addressed via a base register.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}
2707
2708/* Return true if either operand is a general purpose register.  */
2709
2710bool
2711gpr_or_gpr_p (rtx op0, rtx op1)
2712{
2713  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2714	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2715}
2716
2717
2718/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
2719
/* Recursive worker for constant_pool_expr_p / toc_relative_expr_p.
   Return 1 if OP is an expression built only from constant-pool symbols,
   the TOC label, and integer constants; record in *HAVE_SYM whether a
   pool symbol was seen and in *HAVE_TOC whether the TOC label was seen.  */
static int
constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
      /* TLS symbols need special relocations and never qualify.  */
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only pool entries that actually go in the TOC count.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves of a sum/difference must themselves qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2757
2758static bool
2759constant_pool_expr_p (rtx op)
2760{
2761  int have_sym = 0;
2762  int have_toc = 0;
2763  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2764}
2765
2766bool
2767toc_relative_expr_p (rtx op)
2768{
2769  int have_sym = 0;
2770  int have_toc = 0;
2771  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2772}
2773
2774bool
2775legitimate_constant_pool_address_p (rtx x)
2776{
2777  return (TARGET_TOC
2778	  && GET_CODE (x) == PLUS
2779	  && GET_CODE (XEXP (x, 0)) == REG
2780	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2781	  && constant_pool_expr_p (XEXP (x, 1)));
2782}
2783
2784static bool
2785legitimate_small_data_p (enum machine_mode mode, rtx x)
2786{
2787  return (DEFAULT_ABI == ABI_V4
2788	  && !flag_pic && !TARGET_TOC
2789	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2790	  && small_data_operand (x, mode));
2791}
2792
2793/* SPE offset addressing is limited to 5-bits worth of double words.  */
2794#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2795
/* Return true if X is a valid (reg + const_int) address for MODE.
   STRICT selects strict base-register checking (after reload).  */
bool
rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (legitimate_constant_pool_address_p (x))
    return true;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  /* EXTRA is how far past OFFSET the last word of a multi-word access
     reaches; the whole span must stay within the signed 16-bit range.  */
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid and
	 constant offset zero should not occur due to canonicalization.
	 Allow any offset when not strict before reload.  */
      return !strict;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

      /* FALLTHRU — DFmode shares the DImode range check below.  */
    case DImode:
      /* On e500v2, we may have:

	   (subreg:DF (mem:DI (plus (reg) (const_int))) 0).

         Which gets addressed with evldd instructions.  */
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

      /* Two-word access: second word must also be reachable.  64-bit
	 ld/std require a word-aligned offset.  */
      if (mode == DFmode || !TARGET_POWERPC64)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Four-word (or two-doubleword) access.  */
      if (mode == TFmode || !TARGET_POWERPC64)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so the signed 16-bit range becomes [0, 0x10000).  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2868
2869static bool
2870legitimate_indexed_address_p (rtx x, int strict)
2871{
2872  rtx op0, op1;
2873
2874  if (GET_CODE (x) != PLUS)
2875    return false;
2876
2877  op0 = XEXP (x, 0);
2878  op1 = XEXP (x, 1);
2879
2880  /* Recognize the rtl generated by reload which we know will later be
2881     replaced with proper base and index regs.  */
2882  if (!strict
2883      && reload_in_progress
2884      && (REG_P (op0) || GET_CODE (op0) == PLUS)
2885      && REG_P (op1))
2886    return true;
2887
2888  return (REG_P (op0) && REG_P (op1)
2889	  && ((INT_REG_OK_FOR_BASE_P (op0, strict)
2890	       && INT_REG_OK_FOR_INDEX_P (op1, strict))
2891	      || (INT_REG_OK_FOR_BASE_P (op1, strict)
2892		  && INT_REG_OK_FOR_INDEX_P (op0, strict))));
2893}
2894
2895inline bool
2896legitimate_indirect_address_p (rtx x, int strict)
2897{
2898  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2899}
2900
2901bool
2902macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2903{
2904  if (!TARGET_MACHO || !flag_pic
2905      || mode != SImode || GET_CODE (x) != MEM)
2906    return false;
2907  x = XEXP (x, 0);
2908
2909  if (GET_CODE (x) != LO_SUM)
2910    return false;
2911  if (GET_CODE (XEXP (x, 0)) != REG)
2912    return false;
2913  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2914    return false;
2915  x = XEXP (x, 1);
2916
2917  return CONSTANT_P (x);
2918}
2919
/* Return true if X is a valid (lo_sum (base-reg) (constant)) address
   for MODE.  STRICT selects strict base-register checking.  */
static bool
legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
{
  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* Restrict addressing for DI because of our SUBREG hackery.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
    return false;
  x = XEXP (x, 1);

  if (TARGET_ELF || TARGET_MACHO)
    {
      /* lo_sum addresses are only usable without PIC (except under the
	 AIX and Darwin ABIs) and without a TOC.  */
      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
	return false;
      if (TARGET_TOC)
	return false;
      /* Single-part values only, and no modes wider than one register
	 (except DFmode when hard FPRs hold it in one register).  */
      if (GET_MODE_NUNITS (mode) != 1)
	return false;
      if (GET_MODE_BITSIZE (mode) > 64
	  || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
	      && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
	return false;

      return CONSTANT_P (x);
    }

  return false;
}
2952
2953
2954/* Try machine-dependent ways of modifying an illegitimate address
2955   to be legitimate.  If we find one, return the new, valid address.
2956   This is used from only one place: `memory_address' in explow.c.
2957
2958   OLDX is the address as it was before break_out_memory_refs was
2959   called.  In some cases it is useful to look at this to decide what
2960   needs to be done.
2961
2962   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2963
2964   It is always safe for this function to do nothing.  It exists to
2965   recognize opportunities to optimize the output.
2966
2967   On RS/6000, first check for the sum of a register with a constant
2968   integer that is out of range.  If so, generate code to add the
2969   constant with the low-order 16 bits masked to the register and force
2970   this result into another register (this can be done with `cau').
2971   Then generate an address of REG+(CONST&0xffff), allowing for the
2972   possibility of bit 16 being a one.
2973
2974   Then check for the sum of a register and something not constant, try to
2975   load the other things into a register and return the sum.  */
2976
rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  /* Thread-local symbols require their model-specific access sequence.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: split into a high part added into a
     new register plus a signed 16-bit low part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; HIGH_INT absorbs the remainder.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so the sum
     becomes an indexed (reg+reg) address, for modes that allow it.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
		   && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes: only reg or reg+reg addressing is supported.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode)
	   || (TARGET_E500_DOUBLE && (mode == DFmode
				      || mode == DImode)))
    {
      if (mode == DImode)
	return NULL_RTX;
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets outside the SPE 5-bit doubleword range must be put
	     in a register too.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: build the constant with elf_high/lo_sum.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Likewise for Darwin without PIC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants placed in the TOC are addressed off the TOC pointer.  */
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
3092
3093/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3094   We need to emit DTP-relative relocations.  */
3095
3096static void
3097rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3098{
3099  switch (size)
3100    {
3101    case 4:
3102      fputs ("\t.long\t", file);
3103      break;
3104    case 8:
3105      fputs (DOUBLE_INT_ASM_OP, file);
3106      break;
3107    default:
3108      gcc_unreachable ();
3109    }
3110  output_addr_const (file, x);
3111  fputs ("@dtprel+0x8000", file);
3112}
3113
3114/* Construct the SYMBOL_REF for the tls_get_addr function.  */
3115
3116static GTY(()) rtx rs6000_tls_symbol;
3117static rtx
3118rs6000_tls_get_addr (void)
3119{
3120  if (!rs6000_tls_symbol)
3121    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3122
3123  return rs6000_tls_symbol;
3124}
3125
3126/* Construct the SYMBOL_REF for TLS GOT references.  */
3127
3128static GTY(()) rtx rs6000_got_symbol;
3129static rtx
3130rs6000_got_sym (void)
3131{
3132  if (!rs6000_got_symbol)
3133    {
3134      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3135      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3136      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3137    }
3138
3139  return rs6000_got_symbol;
3140}
3141
3142/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
3143   this (thread-local) address.  */
3144
static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  /* Local-exec with 16-bit offsets: one tprel add off the thread
     pointer (r13 on 64-bit, r2 on 32-bit).  */
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  /* Local-exec with 32-bit offsets: tprel high-adjusted part followed
     by the low part.  */
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  /* Remaining models all go through the GOT.  */
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* We currently use relocations like @got@tlsgd for tls, which
	 means the linker will handle allocation of tls entries, placing
	 them in the .got section.  So use a pointer to the .got section,
	 not one to secondary TOC sections used by 64-bit -mminimal-toc,
	 or to secondary GOT sections used by 32-bit -fPIC.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, 2);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC: compute the GOT address at run time and wrap
		     the sequence in REG_LIBCALL/REG_RETVAL notes so it
		     is treated as a single unit.  */
		  rtx tempLR, tmp3, mem;
		  rtx first, last;

		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_const_mem (Pmode, tmp1);

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      /* Global-dynamic: call __tls_get_addr with the GOT tlsgd entry,
	 packaged as a libcall block.  */
      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      /* Local-dynamic: one __tls_get_addr call for the module base,
	 then add the per-symbol dtprel offset.  */
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Full-size dtprel offset loaded from the GOT.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
3326
3327/* Return 1 if X contains a thread-local symbol.  */
3328
3329bool
3330rs6000_tls_referenced_p (rtx x)
3331{
3332  if (! TARGET_HAVE_TLS)
3333    return false;
3334
3335  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3336}
3337
3338/* Return 1 if *X is a thread-local symbol.  This is the same as
3339   rs6000_tls_symbol_ref except for the type of the unused argument.  */
3340
/* for_each_rtx callback: return nonzero (stopping the walk) if *X is a
   thread-local symbol.  DATA is unused.  */
static int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
3346
3347/* The convention appears to be to define this wherever it is used.
3348   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3349   is now used here.  */
3350#ifndef REG_MODE_OK_FOR_BASE_P
3351#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3352#endif
3353
3354/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
3355   replace the input X, or the original X if no replacement is called for.
3356   The output parameter *WIN is 1 if the calling macro should goto WIN,
3357   0 if it should not.
3358
3359   For RS/6000, we wish to handle large displacements off a base
3360   register by splitting the addend across an addiu/addis and the mem insn.
3361   This cuts number of extra insns needed from 3 to 1.
3362
3363   On Darwin, we use this to generate code for floating point constants.
3364   A movsf_low is generated so we wind up with 2 instructions rather than 3.
3365   The Darwin code is inside #if TARGET_MACHO because only then is
3366   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
				  int opnum, int type,
				  int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Force ld/std non-word aligned offset into base register by wrapping
     in offset 0.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < 32
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) & 3) != 0
      && !ALTIVEC_VECTOR_MODE (mode)
      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && TARGET_POWERPC64)
    {
      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      *win = 1;
      return x;
    }

  /* Split a large constant offset: the high part is reloaded into a
     base register, the 16-bit low part stays in the mem.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !(TARGET_E500_DOUBLE && (mode == DFmode
				  || mode == DImode))
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* Rewrite a bare symbol reference as HIGH/LO_SUM so only the high
     part needs a register (on Darwin, via the PIC base).  */
  if (GET_CODE (x) == SYMBOL_REF
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
#if TARGET_MACHO
      && DEFAULT_ABI == ABI_DARWIN
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
#else
      && DEFAULT_ABI == ABI_V4
      && !flag_pic
#endif
      /* Don't do this for TFmode, since the result isn't offsettable.
	 The same goes for DImode without 64-bit gprs and DFmode
	 without fprs.  */
      && mode != TFmode
      && (mode != DImode || TARGET_POWERPC64)
      && (mode != DFmode || TARGET_POWERPC64
	  || (TARGET_FPRS && TARGET_HARD_FLOAT)))
    {
#if TARGET_MACHO
      if (flag_pic)
	{
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
					machopic_function_base_sym ()));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
#endif
	x = gen_rtx_LO_SUM (GET_MODE (x),
	      gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* Reload an offset address wrapped by an AND that represents the
     masking of the lower bits.  Strip the outer AND and let reload
     convert the offset address into an indirect address.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    {
      x = XEXP (x, 0);
      *win = 1;
      return x;
    }

  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      x = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
3533
3534/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3535   that is a valid memory address for an instruction.
3536   The MODE argument is the machine mode for the MEM expression
3537   that wants to use this address.
3538
3539   On the RS/6000, there are four valid address: a SYMBOL_REF that
3540   refers to a constant pool entry of an address (or the sum of it
3541   plus a constant), a short (16-bit signed) constant plus a register,
3542   the sum of two registers, or a register indirect, possibly with an
3543   auto-increment.  For DFmode and DImode with a constant plus register,
3544   we must ensure that both words are addressable or PowerPC64 with offset
3545   word aligned.
3546
3547   For modes spanning multiple registers (DFmode in 32-bit GPRs,
3548   32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3549   adjacent memory cells are accessed by adding word-sized offsets
3550   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* If this is an unaligned stvx/ldvx type address, discard the outer AND.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    x = XEXP (x, 0);

  /* TLS references always need a model-specific access sequence.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, for modes the update forms can handle.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && mode != TFmode
      /* Restrict addressing for DI because of our SUBREG hackery.  */
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
	  || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg) addressing, for modes accessed in one piece.  */
  if (mode != TImode
      && mode != TFmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3601
3602/* Go to LABEL if ADDR (a legitimate address expression)
3603   has an effect that depends on the machine mode it is used for.
3604
3605   On the RS/6000 this is true of all integral offsets (since AltiVec
3606   modes don't allow them) or is a pre-increment or decrement.
3607
3608   ??? Except that due to conceptual problems in offsettable_address_p
3609   we can't really report the problems of integral offsets.  So leave
3610   this assuming that the adjustable offset must be valid for the
3611   sub-words of a TFmode operand, which is what we had before.  */
3612
3613bool
3614rs6000_mode_dependent_address (rtx addr)
3615{
3616  switch (GET_CODE (addr))
3617    {
3618    case PLUS:
3619      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3620	{
3621	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3622	  return val + 12 + 0x8000 >= 0x10000;
3623	}
3624      break;
3625
3626    case LO_SUM:
3627      return true;
3628
3629    case PRE_INC:
3630    case PRE_DEC:
3631      return TARGET_UPDATE;
3632
3633    default:
3634      break;
3635    }
3636
3637  return false;
3638}
3639
3640/* More elaborate version of recog's offsettable_memref_p predicate
3641   that works around the ??? note of rs6000_mode_dependent_address.
3642   In particular it accepts
3643
3644     (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3645
3646   in 32-bit mode, that the recog predicate rejects.  */
3647
3648bool
3649rs6000_offsettable_memref_p (rtx op)
3650{
3651  if (!MEM_P (op))
3652    return false;
3653
3654  /* First mimic offsettable_memref_p.  */
3655  if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3656    return true;
3657
3658  /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3659     the latter predicate knows nothing about the mode of the memory
3660     reference and, therefore, assumes that it is the largest supported
3661     mode (TFmode).  As a consequence, legitimate offsettable memory
3662     references are rejected.  rs6000_legitimate_offset_address_p contains
3663     the correct logic for the PLUS case of rs6000_mode_dependent_address.  */
3664  return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3665}
3666
3667/* Return number of consecutive hard regs needed starting at reg REGNO
3668   to hold something of mode MODE.
3669   This is ordinarily the length in words of a value of mode MODE
3670   but can be less for certain modes in special long registers.
3671
3672   For the SPE, GPRs are 64 bits but only 32 bits are visible in
3673   scalar instructions.  The upper 32 bits are only available to the
3674   SIMD instructions.
3675
3676   POWER and PowerPC GPRs hold 32 bits worth;
3677   PowerPC64 GPRs and FPRs point register holds 64 bits worth.  */
3678
3679int
3680rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3681{
3682  if (FP_REGNO_P (regno))
3683    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3684
3685  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3686    return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3687
3688  if (ALTIVEC_REGNO_P (regno))
3689    return
3690      (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3691
3692  /* The value returned for SCmode in the E500 double case is 2 for
3693     ABI compatibility; storing an SCmode value in a single register
3694     would require function_arg and rs6000_spe_function_arg to handle
3695     SCmode so as to pass the value correctly in a pair of
3696     registers.  */
3697  if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
3698    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3699
3700  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3701}
3702
/* Change register usage conditional on target flags.  Adjusts the
   global fixed_regs / call_used_regs / call_really_used_regs /
   global_regs arrays after the command-line options are known.  */
void
rs6000_conditional_register_usage (void)
{
  int i;

  /* Set MQ register fixed (already call_used) if not POWER
     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
     be allocated.  */
  if (! TARGET_POWER)
    fixed_regs[64] = 1;

  /* 64-bit AIX and Linux reserve GPR13 for thread-private data.  */
  if (TARGET_64BIT)
    fixed_regs[13] = call_used_regs[13]
      = call_really_used_regs[13] = 1;

  /* Conditionally disable FPRs.  */
  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
    for (i = 32; i < 64; i++)
      fixed_regs[i] = call_used_regs[i]
	= call_really_used_regs[i] = 1;

  /* The TOC register is not killed across calls in a way that is
     visible to the compiler.  */
  if (DEFAULT_ABI == ABI_AIX)
    call_really_used_regs[2] = 0;

  /* V.4 PIC code (-fPIC): the PIC register is reserved but remains
     call-saved.  */
  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 2)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* V.4 small-model PIC (-fpic): the PIC register is also
     call-clobbered.  */
  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 1)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Darwin always reserves its PIC base register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
      fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* With -mminimal-toc the TOC base register is managed by hand.  */
  if (TARGET_TOC && TARGET_MINIMAL_TOC)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* VSCR is read/written by AltiVec code outside the allocator's view.  */
  if (TARGET_ALTIVEC)
    global_regs[VSCR_REGNO] = 1;

  if (TARGET_SPE)
    {
      global_regs[SPEFSCR_REGNO] = 1;
      fixed_regs[FIXED_SCRATCH]
	= call_used_regs[FIXED_SCRATCH]
	= call_really_used_regs[FIXED_SCRATCH] = 1;
    }

  /* Without AltiVec, take all vector registers (and VRSAVE) out of play.  */
  if (! TARGET_ALTIVEC)
    {
      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
	fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
      call_really_used_regs[VRSAVE_REGNO] = 1;
    }

  /* Under the AltiVec ABI the first 20 vector registers (v0-v19) are
     call-clobbered.  */
  if (TARGET_ALTIVEC_ABI)
    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
      call_used_regs[i] = call_really_used_regs[i] = 1;
}
3775
3776/* Try to output insns to set TARGET equal to the constant C if it can
3777   be done in less than N insns.  Do all computations in MODE.
3778   Returns the place where the output has been placed if it can be
3779   done and the insns have been emitted.  If it would take more than N
3780   insns, zero is returned and no insns and emitted.  */
3781
3782rtx
3783rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3784		       rtx source, int n ATTRIBUTE_UNUSED)
3785{
3786  rtx result, insn, set;
3787  HOST_WIDE_INT c0, c1;
3788
3789  switch (mode)
3790    {
3791      case  QImode:
3792    case HImode:
3793      if (dest == NULL)
3794	dest = gen_reg_rtx (mode);
3795      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3796      return dest;
3797
3798    case SImode:
3799      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3800
3801      emit_insn (gen_rtx_SET (VOIDmode, result,
3802			      GEN_INT (INTVAL (source)
3803				       & (~ (HOST_WIDE_INT) 0xffff))));
3804      emit_insn (gen_rtx_SET (VOIDmode, dest,
3805			      gen_rtx_IOR (SImode, result,
3806					   GEN_INT (INTVAL (source) & 0xffff))));
3807      result = dest;
3808      break;
3809
3810    case DImode:
3811      switch (GET_CODE (source))
3812	{
3813	case CONST_INT:
3814	  c0 = INTVAL (source);
3815	  c1 = -(c0 < 0);
3816	  break;
3817
3818	case CONST_DOUBLE:
3819#if HOST_BITS_PER_WIDE_INT >= 64
3820	  c0 = CONST_DOUBLE_LOW (source);
3821	  c1 = -(c0 < 0);
3822#else
3823	  c0 = CONST_DOUBLE_LOW (source);
3824	  c1 = CONST_DOUBLE_HIGH (source);
3825#endif
3826	  break;
3827
3828	default:
3829	  gcc_unreachable ();
3830	}
3831
3832      result = rs6000_emit_set_long_const (dest, c0, c1);
3833      break;
3834
3835    default:
3836      gcc_unreachable ();
3837    }
3838
3839  insn = get_last_insn ();
3840  set = single_set (insn);
3841  if (! CONSTANT_P (SET_SRC (set)))
3842    set_unique_reg_note (insn, REG_EQUAL, source);
3843
3844  return result;
3845}
3846
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 holds the low host word of the constant and C2 the high word
   (C2 is recomputed from C1 when the host word is 64 bits wide).
   Returns DEST.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      /* 32-bit target: just store the two 32-bit halves of DEST
	 separately; endianness selects which subword gets C1.  */
      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the constant into four 16-bit pieces, ud1 least
	 significant.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: value is a sign-extended 16-bit constant -- one li.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  /* The xor/subtract dance sign-extends ud1 portably (shifting
	     into the sign bit would be undefined behavior).  */
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Case 2: value is a sign-extended 32-bit constant -- lis, then
	 optionally ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: bits 48-63 are a sign extension of bit 47 -- build the
	 upper 32 bits of the eventual 48-bit value, OR in the middle
	 piece, shift left 16, then OR in the low piece.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: lis/ori for the high 32 bits, shift left 32,
	 then oris/ori for the low 32 bits -- up to five insns.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
3933
3934/* Helper for the following.  Get rid of [r+r] memory refs
3935   in cases where it won't work (TImode, TFmode).  */
3936
3937static void
3938rs6000_eliminate_indexed_memrefs (rtx operands[2])
3939{
3940  if (GET_CODE (operands[0]) == MEM
3941      && GET_CODE (XEXP (operands[0], 0)) != REG
3942      && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3943      && ! reload_in_progress)
3944    operands[0]
3945      = replace_equiv_address (operands[0],
3946			       copy_addr_to_reg (XEXP (operands[0], 0)));
3947
3948  if (GET_CODE (operands[1]) == MEM
3949      && GET_CODE (XEXP (operands[1], 0)) != REG
3950      && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3951      && ! reload_in_progress)
3952    operands[1]
3953      = replace_equiv_address (operands[1],
3954			       copy_addr_to_reg (XEXP (operands[1], 0)));
3955}
3956
/* Emit a move from SOURCE to DEST in mode MODE.  Operands may be
   legitimized, forced to memory, or split into multiple moves before
   the final SET is emitted.  */
void
rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* Any remaining integral CONST_DOUBLE must not be representable as a
     CONST_INT (i.e. its high word must carry information).  */
  gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
	      || FLOAT_MODE_P (mode)
	      || ((CONST_DOUBLE_HIGH (operands[1]) != 0
		   || CONST_DOUBLE_LOW (operands[1]) < 0)
		  && (CONST_DOUBLE_HIGH (operands[1]) != -1
		      || CONST_DOUBLE_LOW (operands[1]) >= 0)));

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Split the DImode mem/mem move into two SImode moves instead.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* A store needs its source in a register acceptable to the store
     patterns.  */
  if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
      && !gpc_reg_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Recognize the case where operand[1] is a reference to thread-local
     data and load its address to a register.  */
  if (rs6000_tls_referenced_p (operands[1]))
    {
      enum tls_model model;
      rtx tmp = operands[1];
      rtx addend = NULL;

      /* Peel off a (const (plus SYMBOL addend)) wrapper, if any.  */
      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
	{
          addend = XEXP (XEXP (tmp, 0), 1);
	  tmp = XEXP (XEXP (tmp, 0), 0);
	}

      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
      model = SYMBOL_REF_TLS_MODEL (tmp);
      gcc_assert (model != 0);

      tmp = rs6000_legitimize_tls_address (tmp, model);
      if (addend)
	{
	  tmp = gen_rtx_PLUS (mode, tmp, addend);
	  tmp = force_operand (tmp, operands[0]);
	}
      operands[1] = tmp;
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* 128-bit constant floating-point values on Darwin should really be
     loaded as two parts.  */
  if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
	 know how to get a DFmode SUBREG of a TFmode.  */
      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
			simplify_gen_subreg (DImode, operands[1], mode, 0),
			DImode);
      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
					     GET_MODE_SIZE (DImode)),
			simplify_gen_subreg (DImode, operands[1], mode,
					     GET_MODE_SIZE (DImode)),
			DImode);
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
      rs6000_eliminate_indexed_memrefs (operands);
      /* fall through */

    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply come from the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC
	  && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
#if TARGET_MACHO
	      if (MACHO_DYNAMIC_NO_PIC_P)
		{
		  /* Take care of any required data indirection.  */
		  operands[1] = rs6000_machopic_legitimize_pic_address (
				  operands[1], mode, operands[0]);
		  if (operands[0] != operands[1])
		    emit_insn (gen_rtx_SET (VOIDmode,
					    operands[0], operands[1]));
		  return;
		}
#endif
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  /* Load the address as a HIGH/LO_SUM pair.  */
	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && constant_pool_expr_p (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! legitimate_constant_pool_address_p (operands[1])
	       && ! toc_relative_expr_p (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the constant landed in the TOC, rewrite the memory
	     reference to be TOC-relative.  */
	  if (TARGET_TOC
	      && constant_pool_expr_p (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_const_mem (mode,
				 create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	    }
	}
      break;

    case TImode:
      rs6000_eliminate_indexed_memrefs (operands);

      /* The POWER TImode move pattern needs a scratch SImode register,
	 so wrap the SET together with a CLOBBER of a scratch.  */
      if (TARGET_POWER)
	{
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
		       gen_rtvec (2,
				  gen_rtx_SET (VOIDmode,
					       operands[0], operands[1]),
				  gen_rtx_CLOBBER (VOIDmode,
						   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      gcc_unreachable ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
4308
/* Nonzero if we can use a floating-point register to pass this arg.
   Decimal float modes are explicitly excluded (they are not passed in
   FPRs), and the next FP argument register must still be available.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (SCALAR_FLOAT_MODE_P (MODE)			\
   && !DECIMAL_FLOAT_MODE_P (MODE)		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.
   Only named arguments qualify, and the next vector argument register
   must still be available under the AltiVec ABI.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
4322
4323/* Return a nonzero value to say to return the function value in
4324   memory, just as large structures are always returned.  TYPE will be
4325   the data type of the value, and FNTYPE will be the type of the
4326   function doing the returning, or @code{NULL} for libcalls.
4327
4328   The AIX ABI for the RS/6000 specifies that all structures are
4329   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
4330   specifies that structures <= 8 bytes are returned in r3/r4, but a
4331   draft put them in memory, and GCC used to implement the draft
4332   instead of the final standard.  Therefore, aix_struct_return
4333   controls this instead of DEFAULT_ABI; V.4 targets needing backward
4334   compatibility can change DRAFT_V4_STRUCT_RET to override the
4335   default, and -m switches get the final word.  See
4336   rs6000_override_options for more details.
4337
4338   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4339   long double support is enabled.  These values are returned in memory.
4340
4341   int_size_in_bytes returns -1 for variable size objects, which go in
4342   memory always.  The cast to unsigned makes -1 > 8.  */
4343
4344static bool
4345rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4346{
4347  /* In the darwin64 abi, try to use registers for larger structs
4348     if possible.  */
4349  if (rs6000_darwin64_abi
4350      && TREE_CODE (type) == RECORD_TYPE
4351      && int_size_in_bytes (type) > 0)
4352    {
4353      CUMULATIVE_ARGS valcum;
4354      rtx valret;
4355
4356      valcum.words = 0;
4357      valcum.fregno = FP_ARG_MIN_REG;
4358      valcum.vregno = ALTIVEC_ARG_MIN_REG;
4359      /* Do a trial code generation as if this were going to be passed
4360	 as an argument; if any part goes in memory, we return NULL.  */
4361      valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4362      if (valret)
4363	return false;
4364      /* Otherwise fall through to more conventional ABI rules.  */
4365    }
4366
4367  if (AGGREGATE_TYPE_P (type)
4368      && (aix_struct_return
4369	  || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4370    return true;
4371
4372  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
4373     modes only exist for GCC vector types if -maltivec.  */
4374  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4375      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4376    return false;
4377
4378  /* Return synthetic vectors in memory.  */
4379  if (TREE_CODE (type) == VECTOR_TYPE
4380      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4381    {
4382      static bool warned_for_return_big_vectors = false;
4383      if (!warned_for_return_big_vectors)
4384	{
4385	  warning (0, "GCC vector returned by reference: "
4386		   "non-standard ABI extension with no compatibility guarantee");
4387	  warned_for_return_big_vectors = true;
4388	}
4389      return true;
4390    }
4391
4392  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
4393    return true;
4394
4395  return false;
4396}
4397
4398/* Initialize a variable CUM of type CUMULATIVE_ARGS
4399   for a call to a function whose data type is FNTYPE.
4400   For a library call, FNTYPE is 0.
4401
4402   For incoming args we set the number of arguments in the prototype large
4403   so we never return a PARALLEL.  */
4404
4405void
4406init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4407		      rtx libname ATTRIBUTE_UNUSED, int incoming,
4408		      int libcall, int n_named_args)
4409{
4410  static CUMULATIVE_ARGS zero_cumulative;
4411
4412  *cum = zero_cumulative;
4413  cum->words = 0;
4414  cum->fregno = FP_ARG_MIN_REG;
4415  cum->vregno = ALTIVEC_ARG_MIN_REG;
4416  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4417  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4418		      ? CALL_LIBCALL : CALL_NORMAL);
4419  cum->sysv_gregno = GP_ARG_MIN_REG;
4420  cum->stdarg = fntype
4421    && (TYPE_ARG_TYPES (fntype) != 0
4422	&& (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
4423	    != void_type_node));
4424
4425  cum->nargs_prototype = 0;
4426  if (incoming || cum->prototype)
4427    cum->nargs_prototype = n_named_args;
4428
4429  /* Check for a longcall attribute.  */
4430  if ((!fntype && rs6000_default_long_calls)
4431      || (fntype
4432	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4433	  && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4434    cum->call_cookie |= CALL_LONG;
4435
4436  if (TARGET_DEBUG_ARG)
4437    {
4438      fprintf (stderr, "\ninit_cumulative_args:");
4439      if (fntype)
4440	{
4441	  tree ret_type = TREE_TYPE (fntype);
4442	  fprintf (stderr, " ret code = %s,",
4443		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
4444	}
4445
4446      if (cum->call_cookie & CALL_LONG)
4447	fprintf (stderr, " longcall,");
4448
4449      fprintf (stderr, " proto = %d, nargs = %d\n",
4450	       cum->prototype, cum->nargs_prototype);
4451    }
4452
4453  if (fntype
4454      && !TARGET_ALTIVEC
4455      && TARGET_ALTIVEC_ABI
4456      && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4457    {
4458      error ("cannot return value in vector register because"
4459	     " altivec instructions are disabled, use -maltivec"
4460	     " to enable them");
4461    }
4462}
4463
4464/* Return true if TYPE must be passed on the stack and not in registers.  */
4465
4466static bool
4467rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4468{
4469  if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4470    return must_pass_in_stack_var_size (mode, type);
4471  else
4472    return must_pass_in_stack_var_size_or_pad (mode, type);
4473}
4474
4475/* If defined, a C expression which determines whether, and in which
4476   direction, to pad out an argument with extra space.  The value
4477   should be of type `enum direction': either `upward' to pad above
4478   the argument, `downward' to pad below, or `none' to inhibit
4479   padding.
4480
4481   For the AIX ABI structs are always stored left shifted in their
4482   argument slot.  */
4483
4484enum direction
4485function_arg_padding (enum machine_mode mode, tree type)
4486{
4487#ifndef AGGREGATE_PADDING_FIXED
4488#define AGGREGATE_PADDING_FIXED 0
4489#endif
4490#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4491#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4492#endif
4493
4494  if (!AGGREGATE_PADDING_FIXED)
4495    {
4496      /* GCC used to pass structures of the same size as integer types as
4497	 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4498	 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4499	 passed padded downward, except that -mstrict-align further
4500	 muddied the water in that multi-component structures of 2 and 4
4501	 bytes in size were passed padded upward.
4502
4503	 The following arranges for best compatibility with previous
4504	 versions of gcc, but removes the -mstrict-align dependency.  */
4505      if (BYTES_BIG_ENDIAN)
4506	{
4507	  HOST_WIDE_INT size = 0;
4508
4509	  if (mode == BLKmode)
4510	    {
4511	      if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4512		size = int_size_in_bytes (type);
4513	    }
4514	  else
4515	    size = GET_MODE_SIZE (mode);
4516
4517	  if (size == 1 || size == 2 || size == 4)
4518	    return downward;
4519	}
4520      return upward;
4521    }
4522
4523  if (AGGREGATES_PAD_UPWARD_ALWAYS)
4524    {
4525      if (type != 0 && AGGREGATE_TYPE_P (type))
4526	return upward;
4527    }
4528
4529  /* Fall back to the default.  */
4530  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4531}
4532
4533/* If defined, a C expression that gives the alignment boundary, in bits,
4534   of an argument with the specified mode and type.  If it is not defined,
4535   PARM_BOUNDARY is used for all arguments.
4536
4537   V.4 wants long longs and doubles to be double word aligned.  Just
4538   testing the mode size is a boneheaded way to do this as it means
4539   that other types such as complex int are also double word aligned.
4540   However, we're stuck with this because changing the ABI might break
4541   existing library interfaces.
4542
4543   Doubleword align SPE vectors.
4544   Quadword align Altivec vectors.
4545   Quadword align large synthetic vector types.   */
4546
4547int
4548function_arg_boundary (enum machine_mode mode, tree type)
4549{
4550  if (DEFAULT_ABI == ABI_V4
4551      && (GET_MODE_SIZE (mode) == 8
4552	  || (TARGET_HARD_FLOAT
4553	      && TARGET_FPRS
4554	      && mode == TFmode)))
4555    return 64;
4556  else if (SPE_VECTOR_MODE (mode)
4557	   || (type && TREE_CODE (type) == VECTOR_TYPE
4558	       && int_size_in_bytes (type) >= 8
4559	       && int_size_in_bytes (type) < 16))
4560    return 64;
4561  else if (ALTIVEC_VECTOR_MODE (mode)
4562	   || (type && TREE_CODE (type) == VECTOR_TYPE
4563	       && int_size_in_bytes (type) >= 16))
4564    return 128;
4565  else if (rs6000_darwin64_abi && mode == BLKmode
4566	   && type && TYPE_ALIGN (type) > 64)
4567    return 128;
4568  else
4569    return PARM_BOUNDARY;
4570}
4571
4572/* For a function parm of MODE and TYPE, return the starting word in
4573   the parameter area.  NWORDS of the parameter area are already used.  */
4574
4575static unsigned int
4576rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4577{
4578  unsigned int align;
4579  unsigned int parm_offset;
4580
4581  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4582  parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4583  return nwords + (-(parm_offset + nwords) & align);
4584}
4585
4586/* Compute the size (in words) of a function argument.  */
4587
4588static unsigned long
4589rs6000_arg_size (enum machine_mode mode, tree type)
4590{
4591  unsigned long size;
4592
4593  if (mode != BLKmode)
4594    size = GET_MODE_SIZE (mode);
4595  else
4596    size = int_size_in_bytes (type);
4597
4598  if (TARGET_32BIT)
4599    return (size + 3) >> 2;
4600  else
4601    return (size + 7) >> 3;
4602}
4603
4604/* Use this to flush pending int fields.  */
4605
4606static void
4607rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4608					  HOST_WIDE_INT bitpos)
4609{
4610  unsigned int startbit, endbit;
4611  int intregs, intoffset;
4612  enum machine_mode mode;
4613
4614  if (cum->intoffset == -1)
4615    return;
4616
4617  intoffset = cum->intoffset;
4618  cum->intoffset = -1;
4619
4620  if (intoffset % BITS_PER_WORD != 0)
4621    {
4622      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4623			    MODE_INT, 0);
4624      if (mode == BLKmode)
4625	{
4626	  /* We couldn't find an appropriate mode, which happens,
4627	     e.g., in packed structs when there are 3 bytes to load.
4628	     Back intoffset back to the beginning of the word in this
4629	     case.  */
4630	  intoffset = intoffset & -BITS_PER_WORD;
4631	}
4632    }
4633
4634  startbit = intoffset & -BITS_PER_WORD;
4635  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4636  intregs = (endbit - startbit) / BITS_PER_WORD;
4637  cum->words += intregs;
4638}
4639
4640/* The darwin64 ABI calls for us to recurse down through structs,
4641   looking for elements passed in registers.  Unfortunately, we have
4642   to track int register count here also because of misalignments
4643   in powerpc alignment mode.  */
4644
4645static void
4646rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4647					    tree type,
4648					    HOST_WIDE_INT startbitpos)
4649{
4650  tree f;
4651
4652  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4653    if (TREE_CODE (f) == FIELD_DECL)
4654      {
4655	HOST_WIDE_INT bitpos = startbitpos;
4656	tree ftype = TREE_TYPE (f);
4657	enum machine_mode mode;
4658	if (ftype == error_mark_node)
4659	  continue;
4660	mode = TYPE_MODE (ftype);
4661
4662	if (DECL_SIZE (f) != 0
4663	    && host_integerp (bit_position (f), 1))
4664	  bitpos += int_bit_position (f);
4665
4666	/* ??? FIXME: else assume zero offset.  */
4667
4668	if (TREE_CODE (ftype) == RECORD_TYPE)
4669	  rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4670	else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4671	  {
4672	    rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4673	    cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4674	    cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4675	  }
4676	else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4677	  {
4678	    rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4679	    cum->vregno++;
4680	    cum->words += 2;
4681	  }
4682	else if (cum->intoffset == -1)
4683	  cum->intoffset = bitpos;
4684      }
4685}
4686
4687/* Update the data in CUM to advance over an argument
4688   of mode MODE and data type TYPE.
4689   (TYPE is null for libcalls where that information may not be available.)
4690
4691   Note that for args passed by reference, function_arg will be called
4692   with MODE and TYPE set to that of the pointer to the arg, not the arg
4693   itself.  */
4694
void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		      tree type, int named, int depth)
{
  int size;

  /* Only tick off an argument if we're not recursing.  */
  if (depth == 0)
    cum->nargs_prototype--;

  /* AltiVec vectors (by mode, or by 16-byte synthetic vector type).  */
  if (TARGET_ALTIVEC_ABI
      && (ALTIVEC_VECTOR_MODE (mode)
	  || (type && TREE_CODE (type) == VECTOR_TYPE
	      && int_size_in_bytes (type) == 16)))
    {
      bool stack = false;

      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
	{
	  cum->vregno++;
	  if (!TARGET_ALTIVEC)
	    error ("cannot pass argument in vector register because"
		   " altivec instructions are disabled, use -maltivec"
		   " to enable them");

	  /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
	     even if it is going to be passed in a vector register.
	     Darwin does the same for variable-argument functions.  */
	  if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
	      || (cum->stdarg && DEFAULT_ABI != ABI_V4))
	    stack = true;
	}
      else
	stack = true;

      if (stack)
	{
	  int align;

	  /* Vector parameters must be 16-byte aligned.  This places
	     them at 2 mod 4 in terms of words in 32-bit mode, since
	     the parameter save area starts at offset 24 from the
	     stack.  In 64-bit mode, they just have to start on an
	     even word, since the parameter save area is 16-byte
	     aligned.  Space for GPRs is reserved even if the argument
	     will be passed in memory.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  cum->words += align + rs6000_arg_size (mode, type);

	  if (TARGET_DEBUG_ARG)
	    {
	      fprintf (stderr, "function_adv: words = %2d, align=%d, ",
		       cum->words, align);
	      fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
		       cum->nargs_prototype, cum->prototype,
		       GET_MODE_NAME (mode));
	    }
	}
    }
  /* Non-varargs SPE vectors each take one GPR slot.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && !cum->stdarg
	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;

  /* Darwin64 struct arguments: recurse through fields so the FP/vector
     register counts agree with rs6000_darwin64_record_arg.  */
  else if (rs6000_darwin64_abi
	   && mode == BLKmode
	   && TREE_CODE (type) == RECORD_TYPE
	   && (size = int_size_in_bytes (type)) > 0)
    {
      /* Variable sized types have size == -1 and are
	 treated as if consisting entirely of ints.
	 Pad to 16 byte boundary if needed.  */
      if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
	  && (cum->words % 2) != 0)
	cum->words++;
      /* For varargs, we can just go up by the size of the struct.  */
      if (!named)
	cum->words += (size + 7) / 8;
      else
	{
	  /* It is tempting to say int register count just goes up by
	     sizeof(type)/8, but this is wrong in a case such as
	     { int; double; int; } [powerpc alignment].  We have to
	     grovel through the fields for these too.  */
	  cum->intoffset = 0;
	  rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
	  rs6000_darwin64_record_arg_advance_flush (cum,
						    size * BITS_PER_UNIT);
	}
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      /* V.4 hard float: SF/DF (and non-IEEE-quad TF) go in FP regs.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode
	      || (mode == TFmode && !TARGET_IEEEQUAD)))
	{
	  /* TFmode needs a pair, so require one extra register free.  */
	  if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
	    cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
	  else
	    {
	      /* Out of FP registers: the value goes on the stack, and
		 no later FP argument may use a register either.  */
	      cum->fregno = FP_ARG_V4_MAX_REG + 1;
	      if (mode == DFmode || mode == TFmode)
		cum->words += cum->words & 1;
	      cum->words += rs6000_arg_size (mode, type);
	    }
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long and SPE vectors are aligned on the stack.
		 So are other 2 word items such as complex int due to
		 a historical mistake.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: GPR slots are reserved in order; FP args also
	 consume FP registers in parallel.  */
      int n_words = rs6000_arg_size (mode, type);
      int start_words = cum->words;
      int align_words = rs6000_parm_start (mode, type, start_words);

      cum->words = align_words + n_words;

      if (SCALAR_FLOAT_MODE_P (mode)
	  && !DECIMAL_FLOAT_MODE_P (mode)
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d, depth = %d\n",
		   named, align_words - start_words, depth);
	}
    }
}
4866
4867static rtx
4868spe_build_register_parallel (enum machine_mode mode, int gregno)
4869{
4870  rtx r1, r3;
4871
4872  switch (mode)
4873    {
4874    case DFmode:
4875      r1 = gen_rtx_REG (DImode, gregno);
4876      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4877      return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4878
4879    case DCmode:
4880      r1 = gen_rtx_REG (DImode, gregno);
4881      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4882      r3 = gen_rtx_REG (DImode, gregno + 2);
4883      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4884      return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4885
4886    default:
4887      gcc_unreachable ();
4888    }
4889}
4890
4891/* Determine where to put a SIMD argument on the SPE.  */
4892static rtx
4893rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4894			 tree type)
4895{
4896  int gregno = cum->sysv_gregno;
4897
4898  /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4899     are passed and returned in a pair of GPRs for ABI compatibility.  */
4900  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4901    {
4902      int n_words = rs6000_arg_size (mode, type);
4903
4904      /* Doubles go in an odd/even register pair (r5/r6, etc).  */
4905      if (mode == DFmode)
4906	gregno += (1 - gregno) & 1;
4907
4908      /* Multi-reg args are not split between registers and stack.  */
4909      if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4910	return NULL_RTX;
4911
4912      return spe_build_register_parallel (mode, gregno);
4913    }
4914  if (cum->stdarg)
4915    {
4916      int n_words = rs6000_arg_size (mode, type);
4917
4918      /* SPE vectors are put in odd registers.  */
4919      if (n_words == 2 && (gregno & 1) == 0)
4920	gregno += 1;
4921
4922      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4923	{
4924	  rtx r1, r2;
4925	  enum machine_mode m = SImode;
4926
4927	  r1 = gen_rtx_REG (m, gregno);
4928	  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4929	  r2 = gen_rtx_REG (m, gregno + 1);
4930	  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4931	  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4932	}
4933      else
4934	return NULL_RTX;
4935    }
4936  else
4937    {
4938      if (gregno <= GP_ARG_MAX_REG)
4939	return gen_rtx_REG (mode, gregno);
4940      else
4941	return NULL_RTX;
4942    }
4943}
4944
4945/* A subroutine of rs6000_darwin64_record_arg.  Assign the bits of the
4946   structure between cum->intoffset and bitpos to integer registers.  */
4947
static void
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
				  HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
  enum machine_mode mode;
  unsigned int regno;
  unsigned int startbit, endbit;
  int this_regno, intregs, intoffset;
  rtx reg;

  /* Nothing pending to flush?  */
  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  /* If this is the trailing part of a word, try to only load that
     much into the register.  Otherwise load the whole register.  Note
     that in the latter case we may pick up unwanted bits.  It's not a
     problem at the moment but may wish to revisit.  */

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
			  MODE_INT, 0);
      if (mode == BLKmode)
	{
	  /* We couldn't find an appropriate mode, which happens,
	     e.g., in packed structs when there are 3 bytes to load.
	     Back intoffset back to the beginning of the word in this
	     case.  */
	 intoffset = intoffset & -BITS_PER_WORD;
	 mode = word_mode;
	}
    }
  else
    mode = word_mode;

  /* Number of GPRs needed to cover [intoffset, bitpos), and the index
     of the first one relative to GP_ARG_MIN_REG.  */
  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  this_regno = cum->words + intoffset / BITS_PER_WORD;

  /* If the run overflows the available GPRs, record that part of the
     struct must live on the stack; the caller decides what to do.  */
  if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
    cum->use_stack = 1;

  intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
  if (intregs <= 0)
    return;

  /* Emit one EXPR_LIST per register; only the first may use a narrow
     mode (the trailing-part case above), the rest use word_mode.  */
  intoffset /= BITS_PER_UNIT;
  do
    {
      regno = GP_ARG_MIN_REG + this_regno;
      reg = gen_rtx_REG (mode, regno);
      rvec[(*k)++] =
	gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));

      this_regno += 1;
      /* Advance to the next word boundary.  */
      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
      mode = word_mode;
      intregs -= 1;
    }
  while (intregs > 0);
}
5013
5014/* Recursive workhorse for the following.  */
5015
5016static void
5017rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
5018				    HOST_WIDE_INT startbitpos, rtx rvec[],
5019				    int *k)
5020{
5021  tree f;
5022
5023  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5024    if (TREE_CODE (f) == FIELD_DECL)
5025      {
5026	HOST_WIDE_INT bitpos = startbitpos;
5027	tree ftype = TREE_TYPE (f);
5028	enum machine_mode mode;
5029	if (ftype == error_mark_node)
5030	  continue;
5031	mode = TYPE_MODE (ftype);
5032
5033	if (DECL_SIZE (f) != 0
5034	    && host_integerp (bit_position (f), 1))
5035	  bitpos += int_bit_position (f);
5036
5037	/* ??? FIXME: else assume zero offset.  */
5038
5039	if (TREE_CODE (ftype) == RECORD_TYPE)
5040	  rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5041	else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
5042	  {
5043#if 0
5044	    switch (mode)
5045	      {
5046	      case SCmode: mode = SFmode; break;
5047	      case DCmode: mode = DFmode; break;
5048	      case TCmode: mode = TFmode; break;
5049	      default: break;
5050	      }
5051#endif
5052	    rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5053	    rvec[(*k)++]
5054	      = gen_rtx_EXPR_LIST (VOIDmode,
5055				   gen_rtx_REG (mode, cum->fregno++),
5056				   GEN_INT (bitpos / BITS_PER_UNIT));
5057	    if (mode == TFmode)
5058	      cum->fregno++;
5059	  }
5060	else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5061	  {
5062	    rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5063	    rvec[(*k)++]
5064	      = gen_rtx_EXPR_LIST (VOIDmode,
5065				   gen_rtx_REG (mode, cum->vregno++),
5066				   GEN_INT (bitpos / BITS_PER_UNIT));
5067	  }
5068	else if (cum->intoffset == -1)
5069	  cum->intoffset = bitpos;
5070      }
5071}
5072
5073/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5074   the register(s) to be used for each field and subfield of a struct
5075   being passed by value, along with the offset of where the
5076   register's value may be found in the block.  FP fields go in FP
5077   register, vector fields go in vector registers, and everything
5078   else goes in int registers, packed as in memory.
5079
5080   This code is also used for function return values.  RETVAL indicates
5081   whether this is the case.
5082
5083   Much of this is taken from the SPARC V9 port, which has a similar
5084   calling convention.  */
5085
static rtx
rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
			    int named, bool retval)
{
  rtx rvec[FIRST_PSEUDO_REGISTER];
  int k = 1, kbase = 1;
  HOST_WIDE_INT typesize = int_size_in_bytes (type);
  /* This is a copy; modifications are not visible to our caller.  */
  CUMULATIVE_ARGS copy_cum = *orig_cum;
  CUMULATIVE_ARGS *cum = &copy_cum;

  /* Pad to 16 byte boundary if needed.  */
  if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
      && (cum->words % 2) != 0)
    cum->words++;

  /* Reset per-record state used by the recursion below.  */
  cum->intoffset = 0;
  cum->use_stack = 0;
  cum->named = named;

  /* Put entries into rvec[] for individual FP and vector fields, and
     for the chunks of memory that go in int regs.  Note we start at
     element 1; 0 is reserved for an indication of using memory, and
     may or may not be filled in below. */
  rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
  rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);

  /* If any part of the struct went on the stack put all of it there.
     This hack is because the generic code for
     FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
     parts of the struct are not at the beginning.  */
  if (cum->use_stack)
    {
      if (retval)
	return NULL_RTX;    /* doesn't go in registers at all */
      /* Include element 0: a NULL_RTX entry signalling "also passed
	 in memory".  */
      kbase = 0;
      rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
    }
  /* Return the PARALLEL if anything was assigned, else let the caller
     fall back to ordinary handling.  */
  if (k > 1 || cum->use_stack)
    return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
  else
    return NULL_RTX;
}
5129
5130/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */
5131
5132static rtx
5133rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
5134{
5135  int n_units;
5136  int i, k;
5137  rtx rvec[GP_ARG_NUM_REG + 1];
5138
5139  if (align_words >= GP_ARG_NUM_REG)
5140    return NULL_RTX;
5141
5142  n_units = rs6000_arg_size (mode, type);
5143
5144  /* Optimize the simple case where the arg fits in one gpr, except in
5145     the case of BLKmode due to assign_parms assuming that registers are
5146     BITS_PER_WORD wide.  */
5147  if (n_units == 0
5148      || (n_units == 1 && mode != BLKmode))
5149    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5150
5151  k = 0;
5152  if (align_words + n_units > GP_ARG_NUM_REG)
5153    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
5154       using a magic NULL_RTX component.
5155       This is not strictly correct.  Only some of the arg belongs in
5156       memory, not all of it.  However, the normal scheme using
5157       function_arg_partial_nregs can result in unusual subregs, eg.
5158       (subreg:SI (reg:DF) 4), which are not handled well.  The code to
5159       store the whole arg to memory is often more efficient than code
5160       to store pieces, and we know that space is available in the right
5161       place for the whole arg.  */
5162    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5163
5164  i = 0;
5165  do
5166    {
5167      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5168      rtx off = GEN_INT (i++ * 4);
5169      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5170    }
5171  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5172
5173  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5174}
5175
5176/* Determine where to put an argument to a function.
5177   Value is zero to push the argument on the stack,
5178   or a hard register in which to store the argument.
5179
5180   MODE is the argument's machine mode.
5181   TYPE is the data type of the argument (as a tree).
5182    This is null for libcalls where that information may
5183    not be available.
5184   CUM is a variable of type CUMULATIVE_ARGS which gives info about
5185    the preceding args and about the function being called.  It is
5186    not modified in this routine.
5187   NAMED is nonzero if this argument is a named parameter
5188    (otherwise it is an extra parameter matching an ellipsis).
5189
5190   On RS/6000 the first eight words of non-FP are normally in registers
5191   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
5192   Under V.4, the first 8 FP args are in registers.
5193
5194   If this is floating-point and no prototype is specified, we use
5195   both an FP and integer register (or possibly FP reg and stack).  Library
5196   functions (when CALL_LIBCALL is set) always have the proper types for args,
5197   so we can pass the FP value just in one register.  emit_library_function
5198   doesn't support PARALLEL anyway.
5199
5200   Note that for args passed by reference, function_arg will be called
5201   with MODE and TYPE set to that of the pointer to the arg, not the arg
5202   itself.  */
5203
rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
	      tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->stdarg
	      || (cum->nargs_prototype < 0
		  && (cum->prototype || TARGET_NO_PROTOTYPE))))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* Darwin64 structs: try the per-field PARALLEL scheme first.  */
  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE)
    {
      rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
      if (rslt != NULL_RTX)
	return rslt;
      /* Else fall through to usual handling.  */
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
	/* Vector parameters get passed in vector register
	   and also in GPRs or memory, in absence of prototype.  */
	int align_words;
	rtx slot;
	/* Round to an even word to honor 16-byte vector alignment.  */
	align_words = (cum->words + 1) & ~1;

	if (align_words >= GP_ARG_NUM_REG)
	  {
	    slot = NULL_RTX;
	  }
	else
	  {
	    slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
	  }
	return gen_rtx_PARALLEL (mode,
		 gen_rtvec (2,
			    gen_rtx_EXPR_LIST (VOIDmode,
					       slot, const0_rtx),
			    gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_REG (mode, cum->vregno),
					       const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI
	   && (ALTIVEC_VECTOR_MODE (mode)
	       || (type && TREE_CODE (type) == VECTOR_TYPE
		   && int_size_in_bytes (type) == 16)))
    {
      /* An AltiVec vector that did not get a vector register: either a
	 named arg with the registers exhausted (NULL_RTX -> memory), or
	 an unnamed arg passed in GPRs/memory on AIX and Darwin.  */
      if (named || abi == ABI_V4)
	return NULL_RTX;
      else
	{
	  /* Vector parameters to varargs functions under AIX or Darwin
	     get passed in memory and possibly also in GPRs.  */
	  int align, align_words, n_words;
	  enum machine_mode part_mode;

	  /* Vector parameters must be 16-byte aligned.  This places them at
	     2 mod 4 in terms of words in 32-bit mode, since the parameter
	     save area starts at offset 24 from the stack.  In 64-bit mode,
	     they just have to start on an even word, since the parameter
	     save area is 16-byte aligned.  */
	  if (TARGET_32BIT)
	    align = (2 - cum->words) & 3;
	  else
	    align = cum->words & 1;
	  align_words = cum->words + align;

	  /* Out of registers?  Memory, then.  */
	  if (align_words >= GP_ARG_NUM_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  /* The vector value goes in GPRs.  Only the part of the
	     value in GPRs is reported here.  */
	  part_mode = mode;
	  n_words = rs6000_arg_size (mode, type);
	  if (align_words + n_words > GP_ARG_NUM_REG)
	    /* Fortunately, there are only two possibilities, the value
	       is either wholly in GPRs or half in GPRs and half not.  */
	    part_mode = DImode;

	  return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
	}
    }
  else if (TARGET_SPE_ABI && TARGET_SPE
	   && (SPE_VECTOR_MODE (mode)
	       || (TARGET_E500_DOUBLE && (mode == DFmode
					  || mode == DCmode))))
    return rs6000_spe_function_arg (cum, mode, type);

  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode
	      || (mode == TFmode && !TARGET_IEEEQUAD)))
	{
	  /* TFmode needs a register pair; require one extra free.  */
	  if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL_RTX;
	}
      else
	{
	  int n_words = rs6000_arg_size (mode, type);
	  int gregno = cum->sysv_gregno;

	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
	     (r7,r8) or (r9,r10).  As does any other 2 word item such
	     as complex int due to a historical mistake.  */
	  if (n_words == 2)
	    gregno += (1 - gregno) & 1;

	  /* Multi-reg args are not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    return NULL_RTX;

	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type,
					      gregno - GP_ARG_MIN_REG);
	  return gen_rtx_REG (mode, gregno);
	}
    }
  else
    {
      /* AIX and Darwin ABIs.  */
      int align_words = rs6000_parm_start (mode, type, cum->words);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
	{
	  rtx rvec[GP_ARG_NUM_REG + 1];
	  rtx r;
	  int k;
	  bool needs_psave;
	  enum machine_mode fmode = mode;
	  unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

	  if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
	    {
	      /* Currently, we only ever need one reg here because complex
		 doubles are split.  */
	      gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);

	      /* Long double split over regs and memory.  */
	      fmode = DFmode;
	    }

	  /* Do we also need to pass this arg in the parameter save
	     area?  */
	  needs_psave = (type
			 && (cum->nargs_prototype <= 0
			     || (DEFAULT_ABI == ABI_AIX
				 && TARGET_XL_COMPAT
				 && align_words >= GP_ARG_NUM_REG)));

	  if (!needs_psave && mode == fmode)
	    return gen_rtx_REG (fmode, cum->fregno);

	  k = 0;
	  if (needs_psave)
	    {
	      /* Describe the part that goes in gprs or the stack.
		 This piece must come first, before the fprs.  */
	      if (align_words < GP_ARG_NUM_REG)
		{
		  unsigned long n_words = rs6000_arg_size (mode, type);

		  if (align_words + n_words > GP_ARG_NUM_REG
		      || (TARGET_32BIT && TARGET_POWERPC64))
		    {
		      /* If this is partially on the stack, then we only
			 include the portion actually in registers here.  */
		      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
		      rtx off;
		      int i = 0;
		      if (align_words + n_words > GP_ARG_NUM_REG)
			/* Not all of the arg fits in gprs.  Say that it
			   goes in memory too, using a magic NULL_RTX
			   component.  Also see comment in
			   rs6000_mixed_function_arg for why the normal
			   function_arg_partial_nregs scheme doesn't work
			   in this case. */
			rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
						       const0_rtx);
		      do
			{
			  r = gen_rtx_REG (rmode,
					   GP_ARG_MIN_REG + align_words);
			  off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
			  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
			}
		      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
		    }
		  else
		    {
		      /* The whole arg fits in gprs.  */
		      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
		      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
		    }
		}
	      else
		/* It's entirely in memory.  */
		rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
	    }

	  /* Describe where this piece goes in the fprs.  */
	  r = gen_rtx_REG (fmode, cum->fregno);
	  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

	  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
	}
      else if (align_words < GP_ARG_NUM_REG)
	{
	  if (TARGET_32BIT && TARGET_POWERPC64)
	    return rs6000_mixed_function_arg (mode, type, align_words);

	  if (mode == BLKmode)
	    mode = Pmode;

	  return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
	}
      else
	return NULL_RTX;
    }
}
5454
5455/* For an arg passed partly in registers and partly in memory, this is
5456   the number of bytes passed in registers.  For args passed entirely in
5457   registers or entirely in memory, zero.  When an arg is described by a
5458   PARALLEL, perhaps using more than one register type, this function
5459   returns the number of bytes used by the first element of the PARALLEL.  */
5460
static int
rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			  tree type, bool named)
{
  int ret = 0;
  int align_words;

  /* Under the V.4 ABI this hook reports no partial bytes; everything
     below applies to the other ABIs only.  */
  if (DEFAULT_ABI == ABI_V4)
    return 0;

  /* A prototyped AltiVec vector arg is never split between registers
     and memory.  */
  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  /* In this complicated case we just disable the partial_nregs code.  */
  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    return 0;

  align_words = rs6000_parm_start (mode, type, cum->words);

  if (USE_FP_FOR_ARG_P (cum, mode, type))
    {
      /* If we are passing this arg in the fixed parameter save area
	 (gprs or memory) as well as fprs, then this function should
	 return the number of partial bytes passed in the parameter
	 save area rather than partial bytes passed in fprs.  */
      if (type
	  && (cum->nargs_prototype <= 0
	      || (DEFAULT_ABI == ABI_AIX
		  && TARGET_XL_COMPAT
		  && align_words >= GP_ARG_NUM_REG)))
	return 0;
      /* The arg needs more fprs than remain; report the bytes that do
	 land in fprs (8 bytes per remaining fp register).  */
      else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
	       > FP_ARG_MAX_REG + 1)
	ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
      else if (cum->nargs_prototype >= 0)
	return 0;
    }

  /* GPR case: the arg starts in a gpr but extends past the last one,
     so the remaining gprs hold only part of it.  */
  if (align_words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
    ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);

  return ret;
}
5511
5512/* A C expression that indicates when an argument must be passed by
5513   reference.  If nonzero for an argument, a copy of that argument is
5514   made in memory and a pointer to the argument is passed instead of
5515   the argument itself.  The pointer is passed in whatever way is
5516   appropriate for passing a pointer to that type.
5517
5518   Under V.4, aggregates and long double are passed by reference.
5519
5520   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5521   reference unless the AltiVec vector extension ABI is in force.
5522
5523   As an extension to all ABIs, variable sized types are passed by
5524   reference.  */
5525
static bool
rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			  enum machine_mode mode, tree type,
			  bool named ATTRIBUTE_UNUSED)
{
  /* Under V.4 with 128-bit IEEE long double, TFmode args go by
     reference.  */
  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
    {
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
      return 1;
    }

  /* All remaining checks examine TYPE, which may be null here
     (presumably for libcalls -- TODO confirm); treat that as
     pass-by-value.  */
  if (!type)
    return 0;

  if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
    {
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
      return 1;
    }

  /* A negative size means the type's size is not a compile-time
     constant; such variable-sized args go by reference.  */
  if (int_size_in_bytes (type) < 0)
    {
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
      return 1;
    }

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
      return 1;
    }

  /* Pass synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      /* Warn only once per compilation; this layout is a GCC-specific
	 extension.  */
      static bool warned_for_pass_big_vectors = false;
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
      if (!warned_for_pass_big_vectors)
	{
	  warning (0, "GCC vector passed by reference: "
		   "non-standard ABI extension with no compatibility guarantee");
	  warned_for_pass_big_vectors = true;
	}
      return 1;
    }

  return 0;
}
5582
5583static void
5584rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5585{
5586  int i;
5587  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5588
5589  if (nregs == 0)
5590    return;
5591
5592  for (i = 0; i < nregs; i++)
5593    {
5594      rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5595      if (reload_completed)
5596	{
5597	  if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5598	    tem = NULL_RTX;
5599	  else
5600	    tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5601				       i * GET_MODE_SIZE (reg_mode));
5602	}
5603      else
5604	tem = replace_equiv_address (tem, XEXP (tem, 0));
5605
5606      gcc_assert (tem);
5607
5608      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5609    }
5610}
5611
5612/* Perform any needed actions needed for a function that is receiving a
5613   variable number of arguments.
5614
5615   CUM is as above.
5616
5617   MODE and TYPE are the mode and type of the current parameter.
5618
5619   PRETEND_SIZE is a variable that should be set to the amount of stack
5620   that must be pushed by the prolog to pretend that our caller pushed
5621   it.
5622
5623   Normally, this macro will push all remaining incoming registers on the
5624   stack and set PRETEND_SIZE to the length of the registers pushed.  */
5625
static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type, int *pretend_size ATTRIBUTE_UNUSED,
			int no_rtl)
{
  CUMULATIVE_ARGS next_cum;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx save_area = NULL_RTX, mem;
  int first_reg_offset, set;

  /* Skip the last named argument.  */
  next_cum = *cum;
  function_arg_advance (&next_cum, mode, type, 1, 0);

  if (DEFAULT_ABI == ABI_V4)
    {
      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;

      if (! no_rtl)
	{
	  int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
	  HOST_WIDE_INT offset = 0;

	  /* Try to optimize the size of the varargs save area.
	     The ABI requires that ap.reg_save_area is doubleword
	     aligned, but we don't need to allocate space for all
	     the bytes, only those to which we actually will save
	     anything.  */
	  if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
	    gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
	  if (TARGET_HARD_FLOAT && TARGET_FPRS
	      && next_cum.fregno <= FP_ARG_V4_MAX_REG
	      && cfun->va_list_fpr_size)
	    {
	      /* When gprs are also being saved, account for the slots
		 of the named-argument fprs that precede the first
		 anonymous fpr.  */
	      if (gpr_reg_num)
		fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
			   * UNITS_PER_FP_WORD;
	      if (cfun->va_list_fpr_size
		  < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
		fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
	      else
		fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
			    * UNITS_PER_FP_WORD;
	    }
	  if (gpr_reg_num)
	    {
	      offset = -((first_reg_offset * reg_size) & ~7);
	      if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
		{
		  gpr_reg_num = cfun->va_list_gpr_size;
		  /* On 32-bit, an odd starting register needs one extra
		     slot to keep the area doubleword aligned.  */
		  if (reg_size == 4 && (first_reg_offset & 1))
		    gpr_reg_num++;
		}
	      gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
	    }
	  else if (fpr_size)
	    offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
		       * UNITS_PER_FP_WORD
		     - (int) (GP_ARG_NUM_REG * reg_size);

	  if (gpr_size + fpr_size)
	    {
	      rtx reg_save_area
		= assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
	      gcc_assert (GET_CODE (reg_save_area) == MEM);
	      reg_save_area = XEXP (reg_save_area, 0);
	      /* The slot address is either virtual_stack_vars_rtx itself
		 or virtual_stack_vars_rtx plus a constant; fold any
		 constant part into OFFSET.  */
	      if (GET_CODE (reg_save_area) == PLUS)
		{
		  gcc_assert (XEXP (reg_save_area, 0)
			      == virtual_stack_vars_rtx);
		  gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
		  offset += INTVAL (XEXP (reg_save_area, 1));
		}
	      else
		gcc_assert (reg_save_area == virtual_stack_vars_rtx);
	    }

	  cfun->machine->varargs_save_offset = offset;
	  save_area = plus_constant (virtual_stack_vars_rtx, offset);
	}
    }
  else
    {
      /* AIX/Darwin: anonymous args go in the caller-allocated parameter
	 save area, addressed from virtual_incoming_args_rtx.  */
      first_reg_offset = next_cum.words;
      save_area = virtual_incoming_args_rtx;

      if (targetm.calls.must_pass_in_stack (mode, type))
	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
    }

  set = get_varargs_alias_set ();
  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
      && cfun->va_list_gpr_size)
    {
      int nregs = GP_ARG_NUM_REG - first_reg_offset;

      if (va_list_gpr_counter_field)
	{
	  /* V4 va_list_gpr_size counts number of registers needed.  */
	  if (nregs > cfun->va_list_gpr_size)
	    nregs = cfun->va_list_gpr_size;
	}
      else
	{
	  /* char * va_list instead counts number of bytes needed.  */
	  if (nregs > cfun->va_list_gpr_size / reg_size)
	    nregs = cfun->va_list_gpr_size / reg_size;
	}

      mem = gen_rtx_MEM (BLKmode,
			 plus_constant (save_area,
					first_reg_offset * reg_size));
      MEM_NOTRAP_P (mem) = 1;
      set_mem_alias_set (mem, set);
      set_mem_align (mem, BITS_PER_WORD);

      /* Spill the remaining anonymous gprs into the save area.  */
      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
				  nregs);
    }

  /* Save FP registers if needed.  */
  if (DEFAULT_ABI == ABI_V4
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && ! no_rtl
      && next_cum.fregno <= FP_ARG_V4_MAX_REG
      && cfun->va_list_fpr_size)
    {
      int fregno = next_cum.fregno, nregs;
      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
      rtx lab = gen_label_rtx ();
      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
					       * UNITS_PER_FP_WORD);

      /* Conditionally branch over the fpr stores when CR1 compares
	 non-equal to zero; the caller sets CR1 per the V.4 varargs
	 convention (NOTE(review): confirm exact condition sense).  */
      emit_jump_insn
	(gen_rtx_SET (VOIDmode,
		      pc_rtx,
		      gen_rtx_IF_THEN_ELSE (VOIDmode,
					    gen_rtx_NE (VOIDmode, cr1,
							const0_rtx),
					    gen_rtx_LABEL_REF (VOIDmode, lab),
					    pc_rtx)));

      /* Store each remaining anonymous fpr, bounded by both the last
	 V.4 argument fpr and the number of fprs va_arg will read.  */
      for (nregs = 0;
	   fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
	   fregno++, off += UNITS_PER_FP_WORD, nregs++)
	{
	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
	  MEM_NOTRAP_P (mem) = 1;
	  set_mem_alias_set (mem, set);
	  set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
	}

      emit_label (lab);
    }
}
5782
5783/* Create the va_list data type.  */
5784
5785static tree
5786rs6000_build_builtin_va_list (void)
5787{
5788  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5789
5790  /* For AIX, prefer 'char *' because that's what the system
5791     header files like.  */
5792  if (DEFAULT_ABI != ABI_V4)
5793    return build_pointer_type (char_type_node);
5794
5795  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5796  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5797
5798  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5799		      unsigned_char_type_node);
5800  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5801		      unsigned_char_type_node);
5802  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5803     every user file.  */
5804  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5805		      short_unsigned_type_node);
5806  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5807		      ptr_type_node);
5808  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5809		      ptr_type_node);
5810
5811  va_list_gpr_counter_field = f_gpr;
5812  va_list_fpr_counter_field = f_fpr;
5813
5814  DECL_FIELD_CONTEXT (f_gpr) = record;
5815  DECL_FIELD_CONTEXT (f_fpr) = record;
5816  DECL_FIELD_CONTEXT (f_res) = record;
5817  DECL_FIELD_CONTEXT (f_ovf) = record;
5818  DECL_FIELD_CONTEXT (f_sav) = record;
5819
5820  TREE_CHAIN (record) = type_decl;
5821  TYPE_NAME (record) = type_decl;
5822  TYPE_FIELDS (record) = f_gpr;
5823  TREE_CHAIN (f_gpr) = f_fpr;
5824  TREE_CHAIN (f_fpr) = f_res;
5825  TREE_CHAIN (f_res) = f_ovf;
5826  TREE_CHAIN (f_ovf) = f_sav;
5827
5828  layout_type (record);
5829
5830  /* The correct type is an array type of one element.  */
5831  return build_array_type (record, build_index_type (size_zero_node));
5832}
5833
5834/* Implement va_start.  */
5835
void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Fields of the V.4 __va_list_tag record, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
	       GP_ARG_NUM_REG);
  n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
	       FP_ARG_NUM_REG);

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Initialize the gpr counter, but only if some va_arg use actually
     reads it.  */
  if (cfun->va_list_gpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
		  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Likewise for the fpr counter.  */
  if (cfun->va_list_fpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
		  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
	        build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If there were no va_arg invocations, don't set up the register
     save area.  */
  if (!cfun->va_list_gpr_size
      && !cfun->va_list_fpr_size
      && n_gpr < GP_ARG_NUM_REG
      && n_fpr < FP_ARG_V4_MAX_REG)
    return;

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  if (cfun->machine->varargs_save_offset)
    t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
	        build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
  t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
5916
5917/* Implement va_arg.  */
5918
tree
rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int size, rsize, n_reg, sav_ofs, sav_scale;
  tree lab_false, lab_over, addr;
  int align;
  tree ptrtype = build_pointer_type (type);

  /* Args passed by reference arrive as a pointer: fetch the pointer
     recursively, then dereference it.  */
  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
      return build_va_arg_indirect_ref (t);
    }

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Small complex args may have been split into separate real and
	 imaginary parts; reassemble them from two scalar fetches.  */
      if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      tree real_part, imag_part;
	      tree post = NULL_TREE;

	      real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  &post);
	      /* Copy the value into a temporary, lest the formal temporary
		 be reused out from under us.  */
	      real_part = get_initialized_tmp_var (real_part, pre_p, &post);
	      append_to_statement_list (post, pre_p);

	      imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  post_p);

	      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
	    }
	}

      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
    }

  /* V.4 from here on.  Fields of __va_list_tag, in declaration
     order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;	/* size in 4-byte words, rounded up */
  align = 1;

  if (TARGET_HARD_FLOAT && TARGET_FPRS
      && (TYPE_MODE (type) == SFmode
	  || TYPE_MODE (type) == DFmode
	  || TYPE_MODE (type) == TFmode))
    {
      /* FP args go in FP registers, if present.  */
      reg = fpr;
      n_reg = (size + 7) / 8;
      sav_ofs = 8*4;	/* skip past the 8 gpr slots (8 regs x 4 bytes) */
      sav_scale = 8;
      if (TYPE_MODE (type) != SFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL;
  addr = create_tmp_var (ptr_type_node, "addr");
  DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = create_artificial_label ();
      lab_over = create_artificial_label ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2 && reg == gpr)
	{
	  /* reg += reg & 1 rounds the counter up to an even gpr; U is
	     the pre-increment value used in the comparison below.  */
	  u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     size_int (n_reg - 1));
	  u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	}

      /* Emit: if (u >= 8 - n_reg + 1) goto lab_false;  i.e. not enough
	 registers remain, so fall through to the overflow area.  */
      t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
      t = build2 (GE_EXPR, boolean_type_node, u, t);
      u = build1 (GOTO_EXPR, void_type_node, lab_false);
      t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* addr = sav + sav_ofs + reg * sav_scale; reg += n_reg.  */
      t = sav;
      if (sav_ofs)
	t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));

      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
      u = build1 (CONVERT_EXPR, integer_type_node, u);
      u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
      t = build2 (PLUS_EXPR, ptr_type_node, t, u);

      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      append_to_statement_list (t, pre_p);

      if ((n_reg == 2 && reg != gpr) || n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of the n_reg == 2 gpr case.  */
	  t = build2 (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
	  gimplify_and_add (t, pre_p);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
		  build_int_cst (NULL_TREE, -align));
    }
  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  u = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (u, pre_p);

  /* Step the overflow pointer past the arg just fetched.  */
  t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  gimplify_and_add (t, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      append_to_statement_list (t, pre_p);
    }

  if (STRICT_ALIGNMENT
      && (TYPE_ALIGN (type)
	  > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
    {
      /* The value (of type complex double, for example) may not be
	 aligned in memory in the saved registers, so copy via a
	 temporary.  (This is the same code as used for SPARC.)  */
      tree tmp = create_tmp_var (type, "va_arg_tmp");
      tree dest_addr = build_fold_addr_expr (tmp);

      tree copy = build_function_call_expr
	(implicit_built_in_decls[BUILT_IN_MEMCPY],
	 tree_cons (NULL_TREE, dest_addr,
		    tree_cons (NULL_TREE, addr,
			       tree_cons (NULL_TREE, size_int (rsize * 4),
					  NULL_TREE))));

      gimplify_and_add (copy, pre_p);
      addr = dest_addr;
    }

  addr = fold_convert (ptrtype, addr);
  return build_va_arg_indirect_ref (addr);
}
6112
6113/* Builtins.  */
6114
6115static void
6116def_builtin (int mask, const char *name, tree type, int code)
6117{
6118  if (mask & target_flags)
6119    {
6120      if (rs6000_builtin_decls[code])
6121	abort ();
6122
6123      rs6000_builtin_decls[code] =
6124        lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
6125				     NULL, NULL_TREE);
6126    }
6127}
6128
6129/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
6130
static const struct builtin_description bdesc_3arg[] =
{
  /* Type-specific AltiVec builtins, each bound to one insn pattern.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },

  /* Overloaded "vec_*" builtins.  CODE_FOR_nothing means no single
     insn pattern; presumably each is resolved to one of the
     type-specific entries above during expansion -- confirm in the
     builtin expansion code.  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
};
6173
6174/* DST operations: void foo (void *, const int, const char).  */
6175
static const struct builtin_description bdesc_dst[] =
{
  /* Type-specific data-stream-touch builtins, one insn pattern each.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },

  /* Overloaded forms; CODE_FOR_nothing means no dedicated pattern
     (presumably resolved during builtin expansion -- confirm there).  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
};
6188
6189/* Simple binary operations: VECc = foo (VECa, VECb).  */
6190
6191static struct builtin_description bdesc_2arg[] =
6192{
6193  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6194  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6195  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6196  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
6197  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6198  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6199  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6200  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6201  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6202  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6203  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
6204  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
6205  { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
6206  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6207  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6208  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6209  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6210  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6211  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
6212  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6213  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
6214  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6215  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6216  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6217  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6218  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6219  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6220  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6221  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6222  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6223  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6224  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6225  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6226  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6227  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6228  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6229  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6230  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6231  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6232  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6233  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6234  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6235  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6236  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6237  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6238  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6239  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6240  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6241  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6242  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6243  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6244  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6245  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6246  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6247  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6248  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6249  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6250  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6251  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6252  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6253  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6254  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6255  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6256  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
6257  { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
6258  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6259  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6260  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6261  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6262  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6263  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6264  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6265  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6266  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6267  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6268  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6269  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6270  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6271  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6272  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6273  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6274  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6275  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6276  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6277  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6278  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6279  { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6280  { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6281  { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6282  { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6283  { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6284  { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6285  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6286  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6287  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6288  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6289  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6290  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6291  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6292  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6293  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6294  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6295  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6296  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6297  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6298  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6299  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6300  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6301  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6302  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6303  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6304
6305  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6306  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6307  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6308  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6309  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6310  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6311  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6312  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6313  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6314  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6315  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6316  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6317  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6318  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6319  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6320  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6321  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6322  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6323  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6324  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6325  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6326  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6327  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6328  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6329  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6330  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6331  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6332  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6333  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6334  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6335  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6336  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6337  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6338  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6339  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6340  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6341  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6342  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6343  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6344  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6345  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6346  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6347  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6348  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6349  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6350  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6351  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6352  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6353  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6354  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6355  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6356  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6357  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6358  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6359  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6360  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6361  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6362  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6363  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6364  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6365  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6366  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6367  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6368  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6369  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6370  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6371  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6372  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6373  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6374  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6375  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6376  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6377  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6378  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6379  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6380  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6381  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6382  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6383  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6384  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6385  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6386  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6387  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6388  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6389  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6390  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6391  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6392  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6393  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6394  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6395  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6396  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6397  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6398  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6399  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6400  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6401  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6402  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6403  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6404  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6405  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6406  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6407  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6408  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6409  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6410  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6411  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6412  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6413  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6414  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6415  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6416  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6417  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6418  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6419  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6420  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6421  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6422  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6423  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6424  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6425  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6426  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6427  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6428  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6429  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6430  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6431  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6432
6433  /* Place holder, leave as first spe builtin.  */
6434  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6435  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6436  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6437  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6438  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6439  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6440  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6441  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6442  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6443  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6444  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6445  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6446  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6447  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6448  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6449  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6450  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6451  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6452  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6453  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6454  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6455  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6456  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6457  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6458  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6459  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6460  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6461  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6462  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6463  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6464  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6465  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6466  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6467  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6468  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6469  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6470  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6471  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6472  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6473  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6474  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6475  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6476  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6477  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6478  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6479  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6480  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6481  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6482  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6483  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6484  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6485  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6486  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6487  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6488  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6489  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6490  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6491  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6492  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6493  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6494  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6495  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6496  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6497  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6498  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6499  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6500  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6501  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6502  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6503  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6504  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6505  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6506  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6507  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6508  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6509  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6510  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6511  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6512  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6513  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6514  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6515  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6516  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6517  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6518  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6519  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6520  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6521  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6522  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6523  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6524  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6525  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6526  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6527  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6528  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6529  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6530  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6531  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6532  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6533  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6534  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6535  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6536  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6537  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6538  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6539  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6540  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6541  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6542  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6543
6544  /* SPE binary operations expecting a 5-bit unsigned literal.  */
6545  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6546
6547  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6548  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6549  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6550  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6551  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6552  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6553  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6554  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6555  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6556  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6557  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6558  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6559  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6560  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6561  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6562  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6563  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6564  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6565  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6566  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6567  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6568  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6569  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6570  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6571  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6572  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6573
6574  /* Place-holder.  Leave as last binary SPE builtin.  */
6575  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6576};
6577
6578/* AltiVec predicates.  */
6579
/* Describes one predicate builtin: a vector comparison that also sets
   condition-register bits.  One table entry per builtin; see
   bdesc_altivec_preds below for the initializers this shape matches.  */
6580struct builtin_description_predicates
6581{
6582  const unsigned int mask;	/* Target option mask gating availability
				   (e.g. MASK_ALTIVEC in the table below).  */
6583  const enum insn_code icode;	/* CODE_FOR_* insn code of the predicate
				   pattern, selected per vector mode
				   (v4sf/v4si/v8hi/v16qi below).  */
6584  const char *opcode;		/* Assembler mnemonic string for the
				   comparison, e.g. "*vcmpbfp.";
				   NULL for the overloaded entries.  */
6585  const char *const name;	/* User-visible builtin name,
				   e.g. "__builtin_altivec_vcmpbfp_p".  */
6586  const enum rs6000_builtins code;	/* Builtin function code enumerator.  */
6587};
6588
/* AltiVec predicate builtins.  Entries are grouped by the vector mode of
   the underlying predicate insn: V4SF (float compares), V4SI, V8HI, V16QI.
   Each row maps a "__builtin_altivec_*_p" name to the predicate expander
   for its mode plus the dotted compare mnemonic it emits.  */
6589static const struct builtin_description_predicates bdesc_altivec_preds[] =
6590{
6591  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6592  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6593  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6594  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6595  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6596  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6597  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6598  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6599  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6600  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6601  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6602  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6603  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6604
  /* Overloaded "__builtin_vec_*" predicates: icode 0 and NULL opcode —
     presumably resolved to one of the type-specific entries above based
     on argument types; NOTE(review): confirm against the overload
     resolution code elsewhere in the backend.  */
6605  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6606  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6607  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6608};
6609
/* SPE predicates.  Each entry: target mask (0 — availability is
   keyed off TARGET_SPE elsewhere), compare insn pattern, source-level
   builtin name, and rs6000_builtins code.
   NOTE(review): the "Place-holder" comments indicate the first and
   last entries anchor a builtin-code range scanned elsewhere — keep
   the ordering.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
6627
/* SPE evsel predicates: compare patterns used to drive the evsel
   (vector select) builtins.  Same layout as bdesc_spe_predicates.
   NOTE(review): the "Place-holder" comments indicate the first and
   last entries anchor a builtin-code range scanned elsewhere — keep
   the ordering.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
6645
/* ABS* operations.  AltiVec absolute-value builtins: the plain abs
   patterns plus the saturating abss variants.  Expanded via
   altivec_expand_abs_builtin, which supplies the two scratch operands
   these patterns require.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
6658
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Each entry: target mask, insn pattern, source-level
   builtin name, and rs6000_builtins code.  Entries with
   CODE_FOR_nothing are the overloaded "__builtin_vec_*" forms,
   resolved to a specific pattern during overload resolution.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* Overloaded forms; resolved to one of the entries above.  */
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
};
6736
6737static rtx
6738rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6739{
6740  rtx pat;
6741  tree arg0 = TREE_VALUE (arglist);
6742  rtx op0 = expand_normal (arg0);
6743  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6744  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6745
6746  if (icode == CODE_FOR_nothing)
6747    /* Builtin not supported on this processor.  */
6748    return 0;
6749
6750  /* If we got invalid arguments bail out before generating bad rtl.  */
6751  if (arg0 == error_mark_node)
6752    return const0_rtx;
6753
6754  if (icode == CODE_FOR_altivec_vspltisb
6755      || icode == CODE_FOR_altivec_vspltish
6756      || icode == CODE_FOR_altivec_vspltisw
6757      || icode == CODE_FOR_spe_evsplatfi
6758      || icode == CODE_FOR_spe_evsplati)
6759    {
6760      /* Only allow 5-bit *signed* literals.  */
6761      if (GET_CODE (op0) != CONST_INT
6762	  || INTVAL (op0) > 15
6763	  || INTVAL (op0) < -16)
6764	{
6765	  error ("argument 1 must be a 5-bit signed literal");
6766	  return const0_rtx;
6767	}
6768    }
6769
6770  if (target == 0
6771      || GET_MODE (target) != tmode
6772      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6773    target = gen_reg_rtx (tmode);
6774
6775  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6776    op0 = copy_to_mode_reg (mode0, op0);
6777
6778  pat = GEN_FCN (icode) (target, op0);
6779  if (! pat)
6780    return 0;
6781  emit_insn (pat);
6782
6783  return target;
6784}
6785
6786static rtx
6787altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6788{
6789  rtx pat, scratch1, scratch2;
6790  tree arg0 = TREE_VALUE (arglist);
6791  rtx op0 = expand_normal (arg0);
6792  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6793  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6794
6795  /* If we have invalid arguments, bail out before generating bad rtl.  */
6796  if (arg0 == error_mark_node)
6797    return const0_rtx;
6798
6799  if (target == 0
6800      || GET_MODE (target) != tmode
6801      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6802    target = gen_reg_rtx (tmode);
6803
6804  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6805    op0 = copy_to_mode_reg (mode0, op0);
6806
6807  scratch1 = gen_reg_rtx (mode0);
6808  scratch2 = gen_reg_rtx (mode0);
6809
6810  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6811  if (! pat)
6812    return 0;
6813  emit_insn (pat);
6814
6815  return target;
6816}
6817
6818static rtx
6819rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6820{
6821  rtx pat;
6822  tree arg0 = TREE_VALUE (arglist);
6823  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6824  rtx op0 = expand_normal (arg0);
6825  rtx op1 = expand_normal (arg1);
6826  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6827  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6828  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6829
6830  if (icode == CODE_FOR_nothing)
6831    /* Builtin not supported on this processor.  */
6832    return 0;
6833
6834  /* If we got invalid arguments bail out before generating bad rtl.  */
6835  if (arg0 == error_mark_node || arg1 == error_mark_node)
6836    return const0_rtx;
6837
6838  if (icode == CODE_FOR_altivec_vcfux
6839      || icode == CODE_FOR_altivec_vcfsx
6840      || icode == CODE_FOR_altivec_vctsxs
6841      || icode == CODE_FOR_altivec_vctuxs
6842      || icode == CODE_FOR_altivec_vspltb
6843      || icode == CODE_FOR_altivec_vsplth
6844      || icode == CODE_FOR_altivec_vspltw
6845      || icode == CODE_FOR_spe_evaddiw
6846      || icode == CODE_FOR_spe_evldd
6847      || icode == CODE_FOR_spe_evldh
6848      || icode == CODE_FOR_spe_evldw
6849      || icode == CODE_FOR_spe_evlhhesplat
6850      || icode == CODE_FOR_spe_evlhhossplat
6851      || icode == CODE_FOR_spe_evlhhousplat
6852      || icode == CODE_FOR_spe_evlwhe
6853      || icode == CODE_FOR_spe_evlwhos
6854      || icode == CODE_FOR_spe_evlwhou
6855      || icode == CODE_FOR_spe_evlwhsplat
6856      || icode == CODE_FOR_spe_evlwwsplat
6857      || icode == CODE_FOR_spe_evrlwi
6858      || icode == CODE_FOR_spe_evslwi
6859      || icode == CODE_FOR_spe_evsrwis
6860      || icode == CODE_FOR_spe_evsubifw
6861      || icode == CODE_FOR_spe_evsrwiu)
6862    {
6863      /* Only allow 5-bit unsigned literals.  */
6864      STRIP_NOPS (arg1);
6865      if (TREE_CODE (arg1) != INTEGER_CST
6866	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
6867	{
6868	  error ("argument 2 must be a 5-bit unsigned literal");
6869	  return const0_rtx;
6870	}
6871    }
6872
6873  if (target == 0
6874      || GET_MODE (target) != tmode
6875      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6876    target = gen_reg_rtx (tmode);
6877
6878  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6879    op0 = copy_to_mode_reg (mode0, op0);
6880  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6881    op1 = copy_to_mode_reg (mode1, op1);
6882
6883  pat = GEN_FCN (icode) (target, op0, op1);
6884  if (! pat)
6885    return 0;
6886  emit_insn (pat);
6887
6888  return target;
6889}
6890
/* Expand an AltiVec predicate builtin.  ICODE is the vector-compare
   pattern; OPCODE is the assembler mnemonic for the dot-form compare,
   passed into the pattern as a SYMBOL_REF.  ARGLIST is
   (cr6_form, a, b): CR6_FORM is an integer literal selecting which
   CR6 condition encodes the desired vec_all*/vec_any* result.
   Returns an SImode value in TARGET (or a fresh register), or
   const0_rtx on invalid arguments.  */
static rtx
altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
				  tree arglist, rtx target)
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  gcc_assert (mode0 == mode1);

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare result itself lands in SCRATCH; TARGET only receives
     the CR6-derived boolean below.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx_SYMBOL_REF (Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
6967
6968static rtx
6969altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6970{
6971  rtx pat, addr;
6972  tree arg0 = TREE_VALUE (arglist);
6973  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6974  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6975  enum machine_mode mode0 = Pmode;
6976  enum machine_mode mode1 = Pmode;
6977  rtx op0 = expand_normal (arg0);
6978  rtx op1 = expand_normal (arg1);
6979
6980  if (icode == CODE_FOR_nothing)
6981    /* Builtin not supported on this processor.  */
6982    return 0;
6983
6984  /* If we got invalid arguments bail out before generating bad rtl.  */
6985  if (arg0 == error_mark_node || arg1 == error_mark_node)
6986    return const0_rtx;
6987
6988  if (target == 0
6989      || GET_MODE (target) != tmode
6990      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6991    target = gen_reg_rtx (tmode);
6992
6993  op1 = copy_to_mode_reg (mode1, op1);
6994
6995  if (op0 == const0_rtx)
6996    {
6997      addr = gen_rtx_MEM (tmode, op1);
6998    }
6999  else
7000    {
7001      op0 = copy_to_mode_reg (mode0, op0);
7002      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7003    }
7004
7005  pat = GEN_FCN (icode) (target, addr);
7006
7007  if (! pat)
7008    return 0;
7009  emit_insn (pat);
7010
7011  return target;
7012}
7013
/* Expand an SPE store builtin mapped to insn pattern ICODE.  Emits
   the store (when rtl generation succeeds) and always returns
   NULL_RTX, or const0_rtx on invalid arguments.
   NOTE(review): the argument/operand pairing is permuted — op0 is
   validated against insn operand 2 and passed last to GEN_FCN, so the
   first builtin argument appears to be the stored value while the
   other two occupy the pattern's first two operands; confirm against
   the spe.md store patterns.  */
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* Legitimize each rtx against the predicate of the insn operand it
     will actually occupy (note the 2/0/1 rotation).  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  /* Stores produce no value; quietly drop the insn if generation
     fails.  */
  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
7046
/* Expand an AltiVec store builtin mapped to insn pattern ICODE.
   ARGLIST is (value, offset, base): the value is stored to base, or
   base+offset when the offset is not a literal zero.  Emits the store
   and always returns NULL_RTX, or const0_rtx on invalid arguments.  */
static rtx
altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx pat, addr;
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = Pmode;
  enum machine_mode mode2 = Pmode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* NOTE(review): the stored value is checked with operand 1's
     predicate but in TMODE (operand 0's mode); presumably the two
     modes agree for the stvx-style patterns used here — confirm
     against altivec.md.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
    op0 = copy_to_mode_reg (tmode, op0);

  op2 = copy_to_mode_reg (mode2, op2);

  /* A literal zero offset yields a plain [base] address; otherwise
     fold the offset register into the address.  */
  if (op1 == const0_rtx)
    {
      addr = gen_rtx_MEM (tmode, op2);
    }
  else
    {
      op1 = copy_to_mode_reg (mode1, op1);
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
    }

  pat = GEN_FCN (icode) (addr, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
7087
7088static rtx
7089rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
7090{
7091  rtx pat;
7092  tree arg0 = TREE_VALUE (arglist);
7093  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7094  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7095  rtx op0 = expand_normal (arg0);
7096  rtx op1 = expand_normal (arg1);
7097  rtx op2 = expand_normal (arg2);
7098  enum machine_mode tmode = insn_data[icode].operand[0].mode;
7099  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7100  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7101  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
7102
7103  if (icode == CODE_FOR_nothing)
7104    /* Builtin not supported on this processor.  */
7105    return 0;
7106
7107  /* If we got invalid arguments bail out before generating bad rtl.  */
7108  if (arg0 == error_mark_node
7109      || arg1 == error_mark_node
7110      || arg2 == error_mark_node)
7111    return const0_rtx;
7112
7113  if (icode == CODE_FOR_altivec_vsldoi_v4sf
7114      || icode == CODE_FOR_altivec_vsldoi_v4si
7115      || icode == CODE_FOR_altivec_vsldoi_v8hi
7116      || icode == CODE_FOR_altivec_vsldoi_v16qi)
7117    {
7118      /* Only allow 4-bit unsigned literals.  */
7119      STRIP_NOPS (arg2);
7120      if (TREE_CODE (arg2) != INTEGER_CST
7121	  || TREE_INT_CST_LOW (arg2) & ~0xf)
7122	{
7123	  error ("argument 3 must be a 4-bit unsigned literal");
7124	  return const0_rtx;
7125	}
7126    }
7127
7128  if (target == 0
7129      || GET_MODE (target) != tmode
7130      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7131    target = gen_reg_rtx (tmode);
7132
7133  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7134    op0 = copy_to_mode_reg (mode0, op0);
7135  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7136    op1 = copy_to_mode_reg (mode1, op1);
7137  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7138    op2 = copy_to_mode_reg (mode2, op2);
7139
7140  pat = GEN_FCN (icode) (target, op0, op1, op2);
7141  if (! pat)
7142    return 0;
7143  emit_insn (pat);
7144
7145  return target;
7146}
7147
7148/* Expand the lvx builtins.  */
7149static rtx
7150altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
7151{
7152  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7153  tree arglist = TREE_OPERAND (exp, 1);
7154  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7155  tree arg0;
7156  enum machine_mode tmode, mode0;
7157  rtx pat, op0;
7158  enum insn_code icode;
7159
7160  switch (fcode)
7161    {
7162    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
7163      icode = CODE_FOR_altivec_lvx_v16qi;
7164      break;
7165    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
7166      icode = CODE_FOR_altivec_lvx_v8hi;
7167      break;
7168    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
7169      icode = CODE_FOR_altivec_lvx_v4si;
7170      break;
7171    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
7172      icode = CODE_FOR_altivec_lvx_v4sf;
7173      break;
7174    default:
7175      *expandedp = false;
7176      return NULL_RTX;
7177    }
7178
7179  *expandedp = true;
7180
7181  arg0 = TREE_VALUE (arglist);
7182  op0 = expand_normal (arg0);
7183  tmode = insn_data[icode].operand[0].mode;
7184  mode0 = insn_data[icode].operand[1].mode;
7185
7186  if (target == 0
7187      || GET_MODE (target) != tmode
7188      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7189    target = gen_reg_rtx (tmode);
7190
7191  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7192    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7193
7194  pat = GEN_FCN (icode) (target, op0);
7195  if (! pat)
7196    return 0;
7197  emit_insn (pat);
7198  return target;
7199}
7200
7201/* Expand the stvx builtins.  */
7202static rtx
7203altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7204			   bool *expandedp)
7205{
7206  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7207  tree arglist = TREE_OPERAND (exp, 1);
7208  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7209  tree arg0, arg1;
7210  enum machine_mode mode0, mode1;
7211  rtx pat, op0, op1;
7212  enum insn_code icode;
7213
7214  switch (fcode)
7215    {
7216    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
7217      icode = CODE_FOR_altivec_stvx_v16qi;
7218      break;
7219    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
7220      icode = CODE_FOR_altivec_stvx_v8hi;
7221      break;
7222    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
7223      icode = CODE_FOR_altivec_stvx_v4si;
7224      break;
7225    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
7226      icode = CODE_FOR_altivec_stvx_v4sf;
7227      break;
7228    default:
7229      *expandedp = false;
7230      return NULL_RTX;
7231    }
7232
7233  arg0 = TREE_VALUE (arglist);
7234  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7235  op0 = expand_normal (arg0);
7236  op1 = expand_normal (arg1);
7237  mode0 = insn_data[icode].operand[0].mode;
7238  mode1 = insn_data[icode].operand[1].mode;
7239
7240  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7241    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7242  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7243    op1 = copy_to_mode_reg (mode1, op1);
7244
7245  pat = GEN_FCN (icode) (op0, op1);
7246  if (pat)
7247    emit_insn (pat);
7248
7249  *expandedp = true;
7250  return NULL_RTX;
7251}
7252
/* Expand the dst builtins.  Scans the bdesc_dst table for an entry
   matching EXP's builtin code; when found, emits the data-stream
   insn and sets *EXPANDEDP.  The third argument must be a 2-bit
   unsigned literal (the stream tag) and is passed to the pattern
   unmodified.  Always returns NULL_RTX once expanded (const0_rtx on
   invalid arguments).  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
			    bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  /* NOTE(review): the cast presumably drops a const qualifier on
     bdesc_dst (declared elsewhere) — the table is only read here.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_normal (arg0);
	op1 = expand_normal (arg1);
	op2 = expand_normal (arg2);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	*expandedp = true;
	/* The stream tag must be a literal in 0..3.  */
	STRIP_NOPS (arg2);
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to %qs must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	/* The first operand is an address; legitimize it in Pmode.  */
	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (Pmode, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	return NULL_RTX;
      }

  return NULL_RTX;
}
7313
7314/* Expand vec_init builtin.  */
7315static rtx
7316altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7317{
7318  enum machine_mode tmode = TYPE_MODE (type);
7319  enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7320  int i, n_elt = GET_MODE_NUNITS (tmode);
7321  rtvec v = rtvec_alloc (n_elt);
7322
7323  gcc_assert (VECTOR_MODE_P (tmode));
7324
7325  for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7326    {
7327      rtx x = expand_normal (TREE_VALUE (arglist));
7328      RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7329    }
7330
7331  gcc_assert (arglist == NULL);
7332
7333  if (!target || !register_operand (target, tmode))
7334    target = gen_reg_rtx (tmode);
7335
7336  rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7337  return target;
7338}
7339
7340/* Return the integer constant in ARG.  Constrain it to be in the range
7341   of the subparts of VEC_TYPE; issue an error if not.  */
7342
7343static int
7344get_element_number (tree vec_type, tree arg)
7345{
7346  unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7347
7348  if (!host_integerp (arg, 1)
7349      || (elt = tree_low_cst (arg, 1), elt > max))
7350    {
7351      error ("selector must be an integer constant in the range 0..%wi", max);
7352      return 0;
7353    }
7354
7355  return elt;
7356}
7357
7358/* Expand vec_set builtin.  */
7359static rtx
7360altivec_expand_vec_set_builtin (tree arglist)
7361{
7362  enum machine_mode tmode, mode1;
7363  tree arg0, arg1, arg2;
7364  int elt;
7365  rtx op0, op1;
7366
7367  arg0 = TREE_VALUE (arglist);
7368  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7369  arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7370
7371  tmode = TYPE_MODE (TREE_TYPE (arg0));
7372  mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7373  gcc_assert (VECTOR_MODE_P (tmode));
7374
7375  op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7376  op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7377  elt = get_element_number (TREE_TYPE (arg0), arg2);
7378
7379  if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7380    op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7381
7382  op0 = force_reg (tmode, op0);
7383  op1 = force_reg (mode1, op1);
7384
7385  rs6000_expand_vector_set (op0, op1, elt);
7386
7387  return op0;
7388}
7389
7390/* Expand vec_ext builtin.  */
7391static rtx
7392altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7393{
7394  enum machine_mode tmode, mode0;
7395  tree arg0, arg1;
7396  int elt;
7397  rtx op0;
7398
7399  arg0 = TREE_VALUE (arglist);
7400  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7401
7402  op0 = expand_normal (arg0);
7403  elt = get_element_number (TREE_TYPE (arg0), arg1);
7404
7405  tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7406  mode0 = TYPE_MODE (TREE_TYPE (arg0));
7407  gcc_assert (VECTOR_MODE_P (mode0));
7408
7409  op0 = force_reg (mode0, op0);
7410
7411  if (optimize || !target || !register_operand (target, tmode))
7412    target = gen_reg_rtx (tmode);
7413
7414  rs6000_expand_vector_extract (target, op0, elt);
7415
7416  return target;
7417}
7418
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* An overloaded builtin that reaches expansion was never resolved
     to a concrete variant by the front end; diagnose it here.  */
  if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
      && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
    {
      *expandedp = true;
      error ("unresolved overload for Altivec builtin %qF", fndecl);
      return const0_rtx;
    }

  /* Try the specialized load, store and data-stream expanders in
     turn; each one sets *EXPANDEDP when it recognizes FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* From here on, every path except the final fall-through at the end
     of the function counts as "expanded".  */
  *expandedp = true;

  switch (fcode)
    {
      /* Vector store builtins map directly onto stv expansions.  */
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* mfvscr takes no source operands; just generate into TARGET,
	 or a fresh pseudo if TARGET is unsuitable for the insn.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* mtvscr consumes a single operand and produces no value.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* dss requires a 2-bit unsigned literal stream selector.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

      /* Whole-vector initialization, element insertion and element
	 extraction are handled by dedicated helpers.  */
    case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
    case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
    case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
    case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
      return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);

    case ALTIVEC_BUILTIN_VEC_SET_V4SI:
    case ALTIVEC_BUILTIN_VEC_SET_V8HI:
    case ALTIVEC_BUILTIN_VEC_SET_V16QI:
    case ALTIVEC_BUILTIN_VEC_SET_V4SF:
      return altivec_expand_vec_set_builtin (arglist);

    case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
    case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
    case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
    case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
      return altivec_expand_vec_ext_builtin (arglist, target);

    default:
      /* Not one of the special cases; try the tables below.  */
      break;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
					       arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					arglist, target);
    default:
      /* Not an LV* builtin either; report "not expanded".  */
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
7599
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  The leading mask
   field is 0 because this table is scanned directly by
   spe_expand_builtin, which matches only on the builtin code.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
7627
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume we will expand; the fall-through at the end clears this.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument of these store builtins is an offset that
	 must be a 5-bit unsigned literal.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* Manually-registered binops; see bdesc_2arg_spe above.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* SPE predicate builtins.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  /* SPE evsel builtins.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
      /* Vector store builtins, indexed and offset forms.  */
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Move from the SPEFSCR; no source operands, result in TARGET
	 (or a fresh pseudo if TARGET is unsuitable).  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Move to the SPEFSCR; one input operand, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* FCODE is not an SPE builtin we know how to expand here.  */
  *expandedp = false;
  return NULL_RTX;
}
7765
/* Expand an SPE predicate builtin.  ARGLIST is (form, a, b) where
   FORM selects which of the four predicate variants (all/any/upper/
   lower) to compute; see the big comment below for how the variants
   map onto CR bits.  The SImode result is placed in TARGET.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be a compile-time constant.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  gcc_assert (mode0 == mode1);

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Emit one compare into a CC scratch; the variants below just read
     different bits of it.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will make validate_condition_mode die.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the chosen CR bit as a 0/1 SImode value in TARGET.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
7863
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = expand_normal (arg3);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  gcc_assert (mode0 == mode1);

  /* If any argument failed to parse, bail before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* NOTE(review): operand[1]'s predicate is reused for all four
     operands below.  Since mode0 == mode1 is asserted above this
     looks intentional, but confirm against the insn patterns.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 based on the compare result; the
     floating-point variant has its own pattern.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
7923
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		       enum machine_mode mode ATTRIBUTE_UNUSED,
		       int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  /* The mask-for-load/store builtins are expanded here directly.
     Both use the lvsr pattern; the load case first negates the
     address (see below).  */
  if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
      || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
    {
      int icode = (int) CODE_FOR_altivec_lvsr;
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      enum machine_mode mode = insn_data[icode].operand[1].mode;
      tree arg;
      rtx op, addr, pat;

      gcc_assert (TARGET_ALTIVEC);

      /* The single argument is a pointer; expand it to an address.  */
      arg = TREE_VALUE (arglist);
      gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
      op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr = memory_address (mode, op);
      if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
	op = addr;
      else
	{
	  /* For the load case need to negate the address.  */
	  op = gen_reg_rtx (GET_MODE (addr));
	  emit_insn (gen_rtx_SET (VOIDmode, op,
			 gen_rtx_NEG (GET_MODE (addr), addr)));
	}
      op = gen_rtx_MEM (mode, op);

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      /* Equivalent to gen_altivec_lvsr (target, op).  */
      pat = GEN_FCN (icode) (target, op);
      if (!pat)
	return 0;
      emit_insn (pat);

      return target;
    }

  /* Give the AltiVec and SPE expanders first crack at the builtin.  */
  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }

  gcc_assert (TARGET_ALTIVEC || TARGET_SPE);

  /* Handle simple unary operations.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_unop_builtin (d->icode, arglist, target);

  /* Handle simple binary operations.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* Handle simple ternary operations.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE  (bdesc_3arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_ternop_builtin (d->icode, arglist, target);

  /* Every registered builtin must be handled by one of the paths
     above.  */
  gcc_unreachable ();
}
8020
/* Build a vector type with NUNITS elements whose element type is a
   fresh copy of NODE, so the result is distinct from ordinary vector
   types built from NODE itself.  */
static tree
build_opaque_vector_type (tree node, int nunits)
{
  node = copy_node (node);
  /* Make the copy its own main variant so it is not treated as a
     variant of the original type.  */
  TYPE_MAIN_VARIANT (node) = node;
  return build_vector_type (node, nunits);
}
8028
/* Create the vector and scalar type nodes used by the rs6000
   builtins, register the AltiVec type keywords with the front end,
   and then initialize the SPE/AltiVec/common builtin functions as
   appropriate for the target flags.  */
static void
rs6000_init_builtins (void)
{
  /* Basic signed/float vector types (2-element ones are for SPE).  */
  V2SI_type_node = build_vector_type (intSI_type_node, 2);
  V2SF_type_node = build_vector_type (float_type_node, 2);
  V4HI_type_node = build_vector_type (intHI_type_node, 4);
  V4SI_type_node = build_vector_type (intSI_type_node, 4);
  V4SF_type_node = build_vector_type (float_type_node, 4);
  V8HI_type_node = build_vector_type (intHI_type_node, 8);
  V16QI_type_node = build_vector_type (intQI_type_node, 16);

  unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
  unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
  unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);

  /* Opaque vector types, distinct from the ordinary ones above.  */
  opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
  opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
  opaque_V4SI_type_node = copy_node (V4SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
     types, especially in C++ land.  Similarly, 'vector pixel' is distinct from
     'vector unsigned short'.  */

  bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
  bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
  bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
  pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);

  /* Cache the scalar types the builtin signatures are built from.  */
  long_integer_type_internal_node = long_integer_type_node;
  long_unsigned_type_internal_node = long_unsigned_type_node;
  intQI_type_internal_node = intQI_type_node;
  uintQI_type_internal_node = unsigned_intQI_type_node;
  intHI_type_internal_node = intHI_type_node;
  uintHI_type_internal_node = unsigned_intHI_type_node;
  intSI_type_internal_node = intSI_type_node;
  uintSI_type_internal_node = unsigned_intSI_type_node;
  float_type_internal_node = float_type_node;
  void_type_internal_node = void_type_node;

  /* Register the AltiVec element-type keywords with the front end.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
  bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
  bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
  pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);

  /* Register the AltiVec vector-type keywords.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  /* Finally, define the builtin functions themselves.  */
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();

#if TARGET_XCOFF
  /* AIX libm provides clog as __clog.  */
  if (built_in_decls [BUILT_IN_CLOG])
    set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
#endif
}
8137
8138/* Search through a set of builtins and enable the mask bits.
8139   DESC is an array of builtins.
8140   SIZE is the total number of builtins.
8141   START is the builtin enum at which to start.
8142   END is the builtin enum at which to end.  */
8143static void
8144enable_mask_for_builtins (struct builtin_description *desc, int size,
8145			  enum rs6000_builtins start,
8146			  enum rs6000_builtins end)
8147{
8148  int i;
8149
8150  for (i = 0; i < size; ++i)
8151    if (desc[i].code == start)
8152      break;
8153
8154  if (i == size)
8155    return;
8156
8157  for (; i < size; ++i)
8158    {
8159      /* Flip all the bits on.  */
8160      desc[i].mask = target_flags;
8161      if (desc[i].code == end)
8162	break;
8163    }
8164}
8165
8166static void
8167spe_init_builtins (void)
8168{
8169  tree endlink = void_list_node;
8170  tree puint_type_node = build_pointer_type (unsigned_type_node);
8171  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
8172  struct builtin_description *d;
8173  size_t i;
8174
8175  tree v2si_ftype_4_v2si
8176    = build_function_type
8177    (opaque_V2SI_type_node,
8178     tree_cons (NULL_TREE, opaque_V2SI_type_node,
8179		tree_cons (NULL_TREE, opaque_V2SI_type_node,
8180			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8181				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
8182						 endlink)))));
8183
8184  tree v2sf_ftype_4_v2sf
8185    = build_function_type
8186    (opaque_V2SF_type_node,
8187     tree_cons (NULL_TREE, opaque_V2SF_type_node,
8188		tree_cons (NULL_TREE, opaque_V2SF_type_node,
8189			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
8190				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
8191						 endlink)))));
8192
8193  tree int_ftype_int_v2si_v2si
8194    = build_function_type
8195    (integer_type_node,
8196     tree_cons (NULL_TREE, integer_type_node,
8197		tree_cons (NULL_TREE, opaque_V2SI_type_node,
8198			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8199				      endlink))));
8200
8201  tree int_ftype_int_v2sf_v2sf
8202    = build_function_type
8203    (integer_type_node,
8204     tree_cons (NULL_TREE, integer_type_node,
8205		tree_cons (NULL_TREE, opaque_V2SF_type_node,
8206			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
8207				      endlink))));
8208
8209  tree void_ftype_v2si_puint_int
8210    = build_function_type (void_type_node,
8211			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8212				      tree_cons (NULL_TREE, puint_type_node,
8213						 tree_cons (NULL_TREE,
8214							    integer_type_node,
8215							    endlink))));
8216
8217  tree void_ftype_v2si_puint_char
8218    = build_function_type (void_type_node,
8219			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8220				      tree_cons (NULL_TREE, puint_type_node,
8221						 tree_cons (NULL_TREE,
8222							    char_type_node,
8223							    endlink))));
8224
8225  tree void_ftype_v2si_pv2si_int
8226    = build_function_type (void_type_node,
8227			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8228				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8229						 tree_cons (NULL_TREE,
8230							    integer_type_node,
8231							    endlink))));
8232
8233  tree void_ftype_v2si_pv2si_char
8234    = build_function_type (void_type_node,
8235			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
8236				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8237						 tree_cons (NULL_TREE,
8238							    char_type_node,
8239							    endlink))));
8240
8241  tree void_ftype_int
8242    = build_function_type (void_type_node,
8243			   tree_cons (NULL_TREE, integer_type_node, endlink));
8244
8245  tree int_ftype_void
8246    = build_function_type (integer_type_node, endlink);
8247
8248  tree v2si_ftype_pv2si_int
8249    = build_function_type (opaque_V2SI_type_node,
8250			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
8251				      tree_cons (NULL_TREE, integer_type_node,
8252						 endlink)));
8253
8254  tree v2si_ftype_puint_int
8255    = build_function_type (opaque_V2SI_type_node,
8256			   tree_cons (NULL_TREE, puint_type_node,
8257				      tree_cons (NULL_TREE, integer_type_node,
8258						 endlink)));
8259
8260  tree v2si_ftype_pushort_int
8261    = build_function_type (opaque_V2SI_type_node,
8262			   tree_cons (NULL_TREE, pushort_type_node,
8263				      tree_cons (NULL_TREE, integer_type_node,
8264						 endlink)));
8265
8266  tree v2si_ftype_signed_char
8267    = build_function_type (opaque_V2SI_type_node,
8268			   tree_cons (NULL_TREE, signed_char_type_node,
8269				      endlink));
8270
8271  /* The initialization of the simple binary and unary builtins is
8272     done in rs6000_common_init_builtins, but we have to enable the
8273     mask bits here manually because we have run out of `target_flags'
8274     bits.  We really need to redesign this mask business.  */
8275
8276  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8277			    ARRAY_SIZE (bdesc_2arg),
8278			    SPE_BUILTIN_EVADDW,
8279			    SPE_BUILTIN_EVXOR);
8280  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8281			    ARRAY_SIZE (bdesc_1arg),
8282			    SPE_BUILTIN_EVABS,
8283			    SPE_BUILTIN_EVSUBFUSIAAW);
8284  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8285			    ARRAY_SIZE (bdesc_spe_predicates),
8286			    SPE_BUILTIN_EVCMPEQ,
8287			    SPE_BUILTIN_EVFSTSTLT);
8288  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8289			    ARRAY_SIZE (bdesc_spe_evsel),
8290			    SPE_BUILTIN_EVSEL_CMPGTS,
8291			    SPE_BUILTIN_EVSEL_FSTSTEQ);
8292
8293  (*lang_hooks.decls.pushdecl)
8294    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8295		 opaque_V2SI_type_node));
8296
8297  /* Initialize irregular SPE builtins.  */
8298
8299  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8300  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8301  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8302  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8303  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8304  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8305  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8306  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8307  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8308  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8309  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8310  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8311  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8312  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8313  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8314  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
8315  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8316  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
8317
8318  /* Loads.  */
8319  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8320  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8321  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8322  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8323  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8324  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8325  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8326  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8327  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8328  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8329  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8330  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8331  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8332  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8333  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8334  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8335  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8336  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8337  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8338  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8339  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8340  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8341
8342  /* Predicates.  */
8343  d = (struct builtin_description *) bdesc_spe_predicates;
8344  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8345    {
8346      tree type;
8347
8348      switch (insn_data[d->icode].operand[1].mode)
8349	{
8350	case V2SImode:
8351	  type = int_ftype_int_v2si_v2si;
8352	  break;
8353	case V2SFmode:
8354	  type = int_ftype_int_v2sf_v2sf;
8355	  break;
8356	default:
8357	  gcc_unreachable ();
8358	}
8359
8360      def_builtin (d->mask, d->name, type, d->code);
8361    }
8362
8363  /* Evsel predicates.  */
8364  d = (struct builtin_description *) bdesc_spe_evsel;
8365  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8366    {
8367      tree type;
8368
8369      switch (insn_data[d->icode].operand[1].mode)
8370	{
8371	case V2SImode:
8372	  type = v2si_ftype_4_v2si;
8373	  break;
8374	case V2SFmode:
8375	  type = v2sf_ftype_4_v2sf;
8376	  break;
8377	default:
8378	  gcc_unreachable ();
8379	}
8380
8381      def_builtin (d->mask, d->name, type, d->code);
8382    }
8383}
8384
8385static void
8386altivec_init_builtins (void)
8387{
8388  struct builtin_description *d;
8389  struct builtin_description_predicates *dp;
8390  size_t i;
8391  tree ftype;
8392
8393  tree pfloat_type_node = build_pointer_type (float_type_node);
8394  tree pint_type_node = build_pointer_type (integer_type_node);
8395  tree pshort_type_node = build_pointer_type (short_integer_type_node);
8396  tree pchar_type_node = build_pointer_type (char_type_node);
8397
8398  tree pvoid_type_node = build_pointer_type (void_type_node);
8399
8400  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8401  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8402  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8403  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8404
8405  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8406
8407  tree int_ftype_opaque
8408    = build_function_type_list (integer_type_node,
8409				opaque_V4SI_type_node, NULL_TREE);
8410
8411  tree opaque_ftype_opaque_int
8412    = build_function_type_list (opaque_V4SI_type_node,
8413				opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8414  tree opaque_ftype_opaque_opaque_int
8415    = build_function_type_list (opaque_V4SI_type_node,
8416				opaque_V4SI_type_node, opaque_V4SI_type_node,
8417				integer_type_node, NULL_TREE);
8418  tree int_ftype_int_opaque_opaque
8419    = build_function_type_list (integer_type_node,
8420                                integer_type_node, opaque_V4SI_type_node,
8421                                opaque_V4SI_type_node, NULL_TREE);
8422  tree int_ftype_int_v4si_v4si
8423    = build_function_type_list (integer_type_node,
8424				integer_type_node, V4SI_type_node,
8425				V4SI_type_node, NULL_TREE);
8426  tree v4sf_ftype_pcfloat
8427    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
8428  tree void_ftype_pfloat_v4sf
8429    = build_function_type_list (void_type_node,
8430				pfloat_type_node, V4SF_type_node, NULL_TREE);
8431  tree v4si_ftype_pcint
8432    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8433  tree void_ftype_pint_v4si
8434    = build_function_type_list (void_type_node,
8435				pint_type_node, V4SI_type_node, NULL_TREE);
8436  tree v8hi_ftype_pcshort
8437    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
8438  tree void_ftype_pshort_v8hi
8439    = build_function_type_list (void_type_node,
8440				pshort_type_node, V8HI_type_node, NULL_TREE);
8441  tree v16qi_ftype_pcchar
8442    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
8443  tree void_ftype_pchar_v16qi
8444    = build_function_type_list (void_type_node,
8445				pchar_type_node, V16QI_type_node, NULL_TREE);
8446  tree void_ftype_v4si
8447    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
8448  tree v8hi_ftype_void
8449    = build_function_type (V8HI_type_node, void_list_node);
8450  tree void_ftype_void
8451    = build_function_type (void_type_node, void_list_node);
8452  tree void_ftype_int
8453    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
8454
8455  tree opaque_ftype_long_pcvoid
8456    = build_function_type_list (opaque_V4SI_type_node,
8457				long_integer_type_node, pcvoid_type_node, NULL_TREE);
8458  tree v16qi_ftype_long_pcvoid
8459    = build_function_type_list (V16QI_type_node,
8460				long_integer_type_node, pcvoid_type_node, NULL_TREE);
8461  tree v8hi_ftype_long_pcvoid
8462    = build_function_type_list (V8HI_type_node,
8463				long_integer_type_node, pcvoid_type_node, NULL_TREE);
8464  tree v4si_ftype_long_pcvoid
8465    = build_function_type_list (V4SI_type_node,
8466				long_integer_type_node, pcvoid_type_node, NULL_TREE);
8467
8468  tree void_ftype_opaque_long_pvoid
8469    = build_function_type_list (void_type_node,
8470				opaque_V4SI_type_node, long_integer_type_node,
8471				pvoid_type_node, NULL_TREE);
8472  tree void_ftype_v4si_long_pvoid
8473    = build_function_type_list (void_type_node,
8474				V4SI_type_node, long_integer_type_node,
8475				pvoid_type_node, NULL_TREE);
8476  tree void_ftype_v16qi_long_pvoid
8477    = build_function_type_list (void_type_node,
8478				V16QI_type_node, long_integer_type_node,
8479				pvoid_type_node, NULL_TREE);
8480  tree void_ftype_v8hi_long_pvoid
8481    = build_function_type_list (void_type_node,
8482				V8HI_type_node, long_integer_type_node,
8483				pvoid_type_node, NULL_TREE);
8484  tree int_ftype_int_v8hi_v8hi
8485    = build_function_type_list (integer_type_node,
8486				integer_type_node, V8HI_type_node,
8487				V8HI_type_node, NULL_TREE);
8488  tree int_ftype_int_v16qi_v16qi
8489    = build_function_type_list (integer_type_node,
8490				integer_type_node, V16QI_type_node,
8491				V16QI_type_node, NULL_TREE);
8492  tree int_ftype_int_v4sf_v4sf
8493    = build_function_type_list (integer_type_node,
8494				integer_type_node, V4SF_type_node,
8495				V4SF_type_node, NULL_TREE);
8496  tree v4si_ftype_v4si
8497    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8498  tree v8hi_ftype_v8hi
8499    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8500  tree v16qi_ftype_v16qi
8501    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8502  tree v4sf_ftype_v4sf
8503    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8504  tree void_ftype_pcvoid_int_int
8505    = build_function_type_list (void_type_node,
8506				pcvoid_type_node, integer_type_node,
8507				integer_type_node, NULL_TREE);
8508
8509  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8510	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8511  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8512	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8513  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8514	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8515  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8516	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8517  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8518	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8519  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8520	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8521  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8522	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8523  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8524	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
8525  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8526  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8527  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
8528  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
8529  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8530  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8531  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8532  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8533  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8534  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8535  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8536  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8537  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8538  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8539  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8540  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
8541  def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8542  def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8543  def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8544  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8545  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8546  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8547  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8548  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8549  def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8550  def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8551  def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8552  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8553  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8554  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8555
8556  def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8557
8558  def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8559  def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8560  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8561  def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8562  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8563  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8564  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8565  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8566  def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8567  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8568
8569  /* Add the DST variants.  */
8570  d = (struct builtin_description *) bdesc_dst;
8571  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8572    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
8573
8574  /* Initialize the predicates.  */
8575  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8576  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8577    {
8578      enum machine_mode mode1;
8579      tree type;
8580      bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8581			   && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8582
8583      if (is_overloaded)
8584	mode1 = VOIDmode;
8585      else
8586	mode1 = insn_data[dp->icode].operand[1].mode;
8587
8588      switch (mode1)
8589	{
8590	case VOIDmode:
8591	  type = int_ftype_int_opaque_opaque;
8592	  break;
8593	case V4SImode:
8594	  type = int_ftype_int_v4si_v4si;
8595	  break;
8596	case V8HImode:
8597	  type = int_ftype_int_v8hi_v8hi;
8598	  break;
8599	case V16QImode:
8600	  type = int_ftype_int_v16qi_v16qi;
8601	  break;
8602	case V4SFmode:
8603	  type = int_ftype_int_v4sf_v4sf;
8604	  break;
8605	default:
8606	  gcc_unreachable ();
8607	}
8608
8609      def_builtin (dp->mask, dp->name, type, dp->code);
8610    }
8611
8612  /* Initialize the abs* operators.  */
8613  d = (struct builtin_description *) bdesc_abs;
8614  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8615    {
8616      enum machine_mode mode0;
8617      tree type;
8618
8619      mode0 = insn_data[d->icode].operand[0].mode;
8620
8621      switch (mode0)
8622	{
8623	case V4SImode:
8624	  type = v4si_ftype_v4si;
8625	  break;
8626	case V8HImode:
8627	  type = v8hi_ftype_v8hi;
8628	  break;
8629	case V16QImode:
8630	  type = v16qi_ftype_v16qi;
8631	  break;
8632	case V4SFmode:
8633	  type = v4sf_ftype_v4sf;
8634	  break;
8635	default:
8636	  gcc_unreachable ();
8637	}
8638
8639      def_builtin (d->mask, d->name, type, d->code);
8640    }
8641
8642  if (TARGET_ALTIVEC)
8643    {
8644      tree decl;
8645
8646      /* Initialize target builtin that implements
8647         targetm.vectorize.builtin_mask_for_load.  */
8648
8649      decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8650                               v16qi_ftype_long_pcvoid,
8651                               ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8652                               BUILT_IN_MD, NULL,
8653                               tree_cons (get_identifier ("const"),
8654                                          NULL_TREE, NULL_TREE));
8655      /* Record the decl. Will be used by rs6000_builtin_mask_for_load.  */
8656      altivec_builtin_mask_for_load = decl;
8657    }
8658
8659  /* Access to the vec_init patterns.  */
8660  ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8661				    integer_type_node, integer_type_node,
8662				    integer_type_node, NULL_TREE);
8663  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8664	       ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8665
8666  ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8667				    short_integer_type_node,
8668				    short_integer_type_node,
8669				    short_integer_type_node,
8670				    short_integer_type_node,
8671				    short_integer_type_node,
8672				    short_integer_type_node,
8673				    short_integer_type_node, NULL_TREE);
8674  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8675	       ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8676
8677  ftype = build_function_type_list (V16QI_type_node, char_type_node,
8678				    char_type_node, char_type_node,
8679				    char_type_node, char_type_node,
8680				    char_type_node, char_type_node,
8681				    char_type_node, char_type_node,
8682				    char_type_node, char_type_node,
8683				    char_type_node, char_type_node,
8684				    char_type_node, char_type_node,
8685				    char_type_node, NULL_TREE);
8686  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8687	       ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8688
8689  ftype = build_function_type_list (V4SF_type_node, float_type_node,
8690				    float_type_node, float_type_node,
8691				    float_type_node, NULL_TREE);
8692  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8693	       ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8694
8695  /* Access to the vec_set patterns.  */
8696  ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8697				    intSI_type_node,
8698				    integer_type_node, NULL_TREE);
8699  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8700	       ALTIVEC_BUILTIN_VEC_SET_V4SI);
8701
8702  ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8703				    intHI_type_node,
8704				    integer_type_node, NULL_TREE);
8705  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8706	       ALTIVEC_BUILTIN_VEC_SET_V8HI);
8707
8708  ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8709				    intQI_type_node,
8710				    integer_type_node, NULL_TREE);
8711  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8712	       ALTIVEC_BUILTIN_VEC_SET_V16QI);
8713
8714  ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8715				    float_type_node,
8716				    integer_type_node, NULL_TREE);
8717  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8718	       ALTIVEC_BUILTIN_VEC_SET_V4SF);
8719
8720  /* Access to the vec_extract patterns.  */
8721  ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8722				    integer_type_node, NULL_TREE);
8723  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8724	       ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8725
8726  ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8727				    integer_type_node, NULL_TREE);
8728  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8729	       ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8730
8731  ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8732				    integer_type_node, NULL_TREE);
8733  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8734	       ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8735
8736  ftype = build_function_type_list (float_type_node, V4SF_type_node,
8737				    integer_type_node, NULL_TREE);
8738  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8739	       ALTIVEC_BUILTIN_VEC_EXT_V4SF);
8740}
8741
8742static void
8743rs6000_common_init_builtins (void)
8744{
8745  struct builtin_description *d;
8746  size_t i;
8747
8748  tree v4sf_ftype_v4sf_v4sf_v16qi
8749    = build_function_type_list (V4SF_type_node,
8750				V4SF_type_node, V4SF_type_node,
8751				V16QI_type_node, NULL_TREE);
8752  tree v4si_ftype_v4si_v4si_v16qi
8753    = build_function_type_list (V4SI_type_node,
8754				V4SI_type_node, V4SI_type_node,
8755				V16QI_type_node, NULL_TREE);
8756  tree v8hi_ftype_v8hi_v8hi_v16qi
8757    = build_function_type_list (V8HI_type_node,
8758				V8HI_type_node, V8HI_type_node,
8759				V16QI_type_node, NULL_TREE);
8760  tree v16qi_ftype_v16qi_v16qi_v16qi
8761    = build_function_type_list (V16QI_type_node,
8762				V16QI_type_node, V16QI_type_node,
8763				V16QI_type_node, NULL_TREE);
8764  tree v4si_ftype_int
8765    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8766  tree v8hi_ftype_int
8767    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8768  tree v16qi_ftype_int
8769    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8770  tree v8hi_ftype_v16qi
8771    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8772  tree v4sf_ftype_v4sf
8773    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8774
8775  tree v2si_ftype_v2si_v2si
8776    = build_function_type_list (opaque_V2SI_type_node,
8777				opaque_V2SI_type_node,
8778				opaque_V2SI_type_node, NULL_TREE);
8779
8780  tree v2sf_ftype_v2sf_v2sf
8781    = build_function_type_list (opaque_V2SF_type_node,
8782				opaque_V2SF_type_node,
8783				opaque_V2SF_type_node, NULL_TREE);
8784
8785  tree v2si_ftype_int_int
8786    = build_function_type_list (opaque_V2SI_type_node,
8787				integer_type_node, integer_type_node,
8788				NULL_TREE);
8789
8790  tree opaque_ftype_opaque
8791    = build_function_type_list (opaque_V4SI_type_node,
8792				opaque_V4SI_type_node, NULL_TREE);
8793
8794  tree v2si_ftype_v2si
8795    = build_function_type_list (opaque_V2SI_type_node,
8796				opaque_V2SI_type_node, NULL_TREE);
8797
8798  tree v2sf_ftype_v2sf
8799    = build_function_type_list (opaque_V2SF_type_node,
8800				opaque_V2SF_type_node, NULL_TREE);
8801
8802  tree v2sf_ftype_v2si
8803    = build_function_type_list (opaque_V2SF_type_node,
8804				opaque_V2SI_type_node, NULL_TREE);
8805
8806  tree v2si_ftype_v2sf
8807    = build_function_type_list (opaque_V2SI_type_node,
8808				opaque_V2SF_type_node, NULL_TREE);
8809
8810  tree v2si_ftype_v2si_char
8811    = build_function_type_list (opaque_V2SI_type_node,
8812				opaque_V2SI_type_node,
8813				char_type_node, NULL_TREE);
8814
8815  tree v2si_ftype_int_char
8816    = build_function_type_list (opaque_V2SI_type_node,
8817				integer_type_node, char_type_node, NULL_TREE);
8818
8819  tree v2si_ftype_char
8820    = build_function_type_list (opaque_V2SI_type_node,
8821				char_type_node, NULL_TREE);
8822
8823  tree int_ftype_int_int
8824    = build_function_type_list (integer_type_node,
8825				integer_type_node, integer_type_node,
8826				NULL_TREE);
8827
8828  tree opaque_ftype_opaque_opaque
8829    = build_function_type_list (opaque_V4SI_type_node,
8830                                opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8831  tree v4si_ftype_v4si_v4si
8832    = build_function_type_list (V4SI_type_node,
8833				V4SI_type_node, V4SI_type_node, NULL_TREE);
8834  tree v4sf_ftype_v4si_int
8835    = build_function_type_list (V4SF_type_node,
8836				V4SI_type_node, integer_type_node, NULL_TREE);
8837  tree v4si_ftype_v4sf_int
8838    = build_function_type_list (V4SI_type_node,
8839				V4SF_type_node, integer_type_node, NULL_TREE);
8840  tree v4si_ftype_v4si_int
8841    = build_function_type_list (V4SI_type_node,
8842				V4SI_type_node, integer_type_node, NULL_TREE);
8843  tree v8hi_ftype_v8hi_int
8844    = build_function_type_list (V8HI_type_node,
8845				V8HI_type_node, integer_type_node, NULL_TREE);
8846  tree v16qi_ftype_v16qi_int
8847    = build_function_type_list (V16QI_type_node,
8848				V16QI_type_node, integer_type_node, NULL_TREE);
8849  tree v16qi_ftype_v16qi_v16qi_int
8850    = build_function_type_list (V16QI_type_node,
8851				V16QI_type_node, V16QI_type_node,
8852				integer_type_node, NULL_TREE);
8853  tree v8hi_ftype_v8hi_v8hi_int
8854    = build_function_type_list (V8HI_type_node,
8855				V8HI_type_node, V8HI_type_node,
8856				integer_type_node, NULL_TREE);
8857  tree v4si_ftype_v4si_v4si_int
8858    = build_function_type_list (V4SI_type_node,
8859				V4SI_type_node, V4SI_type_node,
8860				integer_type_node, NULL_TREE);
8861  tree v4sf_ftype_v4sf_v4sf_int
8862    = build_function_type_list (V4SF_type_node,
8863				V4SF_type_node, V4SF_type_node,
8864				integer_type_node, NULL_TREE);
8865  tree v4sf_ftype_v4sf_v4sf
8866    = build_function_type_list (V4SF_type_node,
8867				V4SF_type_node, V4SF_type_node, NULL_TREE);
8868  tree opaque_ftype_opaque_opaque_opaque
8869    = build_function_type_list (opaque_V4SI_type_node,
8870                                opaque_V4SI_type_node, opaque_V4SI_type_node,
8871                                opaque_V4SI_type_node, NULL_TREE);
8872  tree v4sf_ftype_v4sf_v4sf_v4si
8873    = build_function_type_list (V4SF_type_node,
8874				V4SF_type_node, V4SF_type_node,
8875				V4SI_type_node, NULL_TREE);
8876  tree v4sf_ftype_v4sf_v4sf_v4sf
8877    = build_function_type_list (V4SF_type_node,
8878				V4SF_type_node, V4SF_type_node,
8879				V4SF_type_node, NULL_TREE);
8880  tree v4si_ftype_v4si_v4si_v4si
8881    = build_function_type_list (V4SI_type_node,
8882				V4SI_type_node, V4SI_type_node,
8883				V4SI_type_node, NULL_TREE);
8884  tree v8hi_ftype_v8hi_v8hi
8885    = build_function_type_list (V8HI_type_node,
8886				V8HI_type_node, V8HI_type_node, NULL_TREE);
8887  tree v8hi_ftype_v8hi_v8hi_v8hi
8888    = build_function_type_list (V8HI_type_node,
8889				V8HI_type_node, V8HI_type_node,
8890				V8HI_type_node, NULL_TREE);
8891  tree v4si_ftype_v8hi_v8hi_v4si
8892    = build_function_type_list (V4SI_type_node,
8893				V8HI_type_node, V8HI_type_node,
8894				V4SI_type_node, NULL_TREE);
8895  tree v4si_ftype_v16qi_v16qi_v4si
8896    = build_function_type_list (V4SI_type_node,
8897				V16QI_type_node, V16QI_type_node,
8898				V4SI_type_node, NULL_TREE);
8899  tree v16qi_ftype_v16qi_v16qi
8900    = build_function_type_list (V16QI_type_node,
8901				V16QI_type_node, V16QI_type_node, NULL_TREE);
8902  tree v4si_ftype_v4sf_v4sf
8903    = build_function_type_list (V4SI_type_node,
8904				V4SF_type_node, V4SF_type_node, NULL_TREE);
8905  tree v8hi_ftype_v16qi_v16qi
8906    = build_function_type_list (V8HI_type_node,
8907				V16QI_type_node, V16QI_type_node, NULL_TREE);
8908  tree v4si_ftype_v8hi_v8hi
8909    = build_function_type_list (V4SI_type_node,
8910				V8HI_type_node, V8HI_type_node, NULL_TREE);
8911  tree v8hi_ftype_v4si_v4si
8912    = build_function_type_list (V8HI_type_node,
8913				V4SI_type_node, V4SI_type_node, NULL_TREE);
8914  tree v16qi_ftype_v8hi_v8hi
8915    = build_function_type_list (V16QI_type_node,
8916				V8HI_type_node, V8HI_type_node, NULL_TREE);
8917  tree v4si_ftype_v16qi_v4si
8918    = build_function_type_list (V4SI_type_node,
8919				V16QI_type_node, V4SI_type_node, NULL_TREE);
8920  tree v4si_ftype_v16qi_v16qi
8921    = build_function_type_list (V4SI_type_node,
8922				V16QI_type_node, V16QI_type_node, NULL_TREE);
8923  tree v4si_ftype_v8hi_v4si
8924    = build_function_type_list (V4SI_type_node,
8925				V8HI_type_node, V4SI_type_node, NULL_TREE);
8926  tree v4si_ftype_v8hi
8927    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8928  tree int_ftype_v4si_v4si
8929    = build_function_type_list (integer_type_node,
8930				V4SI_type_node, V4SI_type_node, NULL_TREE);
8931  tree int_ftype_v4sf_v4sf
8932    = build_function_type_list (integer_type_node,
8933				V4SF_type_node, V4SF_type_node, NULL_TREE);
8934  tree int_ftype_v16qi_v16qi
8935    = build_function_type_list (integer_type_node,
8936				V16QI_type_node, V16QI_type_node, NULL_TREE);
8937  tree int_ftype_v8hi_v8hi
8938    = build_function_type_list (integer_type_node,
8939				V8HI_type_node, V8HI_type_node, NULL_TREE);
8940
8941  /* Add the simple ternary operators.  */
8942  d = (struct builtin_description *) bdesc_3arg;
8943  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8944    {
8945      enum machine_mode mode0, mode1, mode2, mode3;
8946      tree type;
8947      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8948			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8949
8950      if (is_overloaded)
8951	{
8952          mode0 = VOIDmode;
8953          mode1 = VOIDmode;
8954          mode2 = VOIDmode;
8955          mode3 = VOIDmode;
8956	}
8957      else
8958	{
8959          if (d->name == 0 || d->icode == CODE_FOR_nothing)
8960	    continue;
8961
8962          mode0 = insn_data[d->icode].operand[0].mode;
8963          mode1 = insn_data[d->icode].operand[1].mode;
8964          mode2 = insn_data[d->icode].operand[2].mode;
8965          mode3 = insn_data[d->icode].operand[3].mode;
8966	}
8967
8968      /* When all four are of the same mode.  */
8969      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8970	{
8971	  switch (mode0)
8972	    {
8973	    case VOIDmode:
8974	      type = opaque_ftype_opaque_opaque_opaque;
8975	      break;
8976	    case V4SImode:
8977	      type = v4si_ftype_v4si_v4si_v4si;
8978	      break;
8979	    case V4SFmode:
8980	      type = v4sf_ftype_v4sf_v4sf_v4sf;
8981	      break;
8982	    case V8HImode:
8983	      type = v8hi_ftype_v8hi_v8hi_v8hi;
8984	      break;
8985	    case V16QImode:
8986	      type = v16qi_ftype_v16qi_v16qi_v16qi;
8987	      break;
8988	    default:
8989	      gcc_unreachable ();
8990	    }
8991	}
8992      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8993	{
8994	  switch (mode0)
8995	    {
8996	    case V4SImode:
8997	      type = v4si_ftype_v4si_v4si_v16qi;
8998	      break;
8999	    case V4SFmode:
9000	      type = v4sf_ftype_v4sf_v4sf_v16qi;
9001	      break;
9002	    case V8HImode:
9003	      type = v8hi_ftype_v8hi_v8hi_v16qi;
9004	      break;
9005	    case V16QImode:
9006	      type = v16qi_ftype_v16qi_v16qi_v16qi;
9007	      break;
9008	    default:
9009	      gcc_unreachable ();
9010	    }
9011	}
9012      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
9013	       && mode3 == V4SImode)
9014	type = v4si_ftype_v16qi_v16qi_v4si;
9015      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
9016	       && mode3 == V4SImode)
9017	type = v4si_ftype_v8hi_v8hi_v4si;
9018      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
9019	       && mode3 == V4SImode)
9020	type = v4sf_ftype_v4sf_v4sf_v4si;
9021
9022      /* vchar, vchar, vchar, 4 bit literal.  */
9023      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9024	       && mode3 == QImode)
9025	type = v16qi_ftype_v16qi_v16qi_int;
9026
9027      /* vshort, vshort, vshort, 4 bit literal.  */
9028      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9029	       && mode3 == QImode)
9030	type = v8hi_ftype_v8hi_v8hi_int;
9031
9032      /* vint, vint, vint, 4 bit literal.  */
9033      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9034	       && mode3 == QImode)
9035	type = v4si_ftype_v4si_v4si_int;
9036
9037      /* vfloat, vfloat, vfloat, 4 bit literal.  */
9038      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9039	       && mode3 == QImode)
9040	type = v4sf_ftype_v4sf_v4sf_int;
9041
9042      else
9043	gcc_unreachable ();
9044
9045      def_builtin (d->mask, d->name, type, d->code);
9046    }
9047
9048  /* Add the simple binary operators.  */
9049  d = (struct builtin_description *) bdesc_2arg;
9050  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9051    {
9052      enum machine_mode mode0, mode1, mode2;
9053      tree type;
9054      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9055			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9056
9057      if (is_overloaded)
9058	{
9059	  mode0 = VOIDmode;
9060	  mode1 = VOIDmode;
9061	  mode2 = VOIDmode;
9062	}
9063      else
9064	{
9065          if (d->name == 0 || d->icode == CODE_FOR_nothing)
9066	    continue;
9067
9068          mode0 = insn_data[d->icode].operand[0].mode;
9069          mode1 = insn_data[d->icode].operand[1].mode;
9070          mode2 = insn_data[d->icode].operand[2].mode;
9071	}
9072
9073      /* When all three operands are of the same mode.  */
9074      if (mode0 == mode1 && mode1 == mode2)
9075	{
9076	  switch (mode0)
9077	    {
9078	    case VOIDmode:
9079	      type = opaque_ftype_opaque_opaque;
9080	      break;
9081	    case V4SFmode:
9082	      type = v4sf_ftype_v4sf_v4sf;
9083	      break;
9084	    case V4SImode:
9085	      type = v4si_ftype_v4si_v4si;
9086	      break;
9087	    case V16QImode:
9088	      type = v16qi_ftype_v16qi_v16qi;
9089	      break;
9090	    case V8HImode:
9091	      type = v8hi_ftype_v8hi_v8hi;
9092	      break;
9093	    case V2SImode:
9094	      type = v2si_ftype_v2si_v2si;
9095	      break;
9096	    case V2SFmode:
9097	      type = v2sf_ftype_v2sf_v2sf;
9098	      break;
9099	    case SImode:
9100	      type = int_ftype_int_int;
9101	      break;
9102	    default:
9103	      gcc_unreachable ();
9104	    }
9105	}
9106
9107      /* A few other combos we really don't want to do manually.  */
9108
9109      /* vint, vfloat, vfloat.  */
9110      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9111	type = v4si_ftype_v4sf_v4sf;
9112
9113      /* vshort, vchar, vchar.  */
9114      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9115	type = v8hi_ftype_v16qi_v16qi;
9116
9117      /* vint, vshort, vshort.  */
9118      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9119	type = v4si_ftype_v8hi_v8hi;
9120
9121      /* vshort, vint, vint.  */
9122      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9123	type = v8hi_ftype_v4si_v4si;
9124
9125      /* vchar, vshort, vshort.  */
9126      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9127	type = v16qi_ftype_v8hi_v8hi;
9128
9129      /* vint, vchar, vint.  */
9130      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9131	type = v4si_ftype_v16qi_v4si;
9132
9133      /* vint, vchar, vchar.  */
9134      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9135	type = v4si_ftype_v16qi_v16qi;
9136
9137      /* vint, vshort, vint.  */
9138      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9139	type = v4si_ftype_v8hi_v4si;
9140
9141      /* vint, vint, 5 bit literal.  */
9142      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
9143	type = v4si_ftype_v4si_int;
9144
9145      /* vshort, vshort, 5 bit literal.  */
9146      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
9147	type = v8hi_ftype_v8hi_int;
9148
9149      /* vchar, vchar, 5 bit literal.  */
9150      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
9151	type = v16qi_ftype_v16qi_int;
9152
9153      /* vfloat, vint, 5 bit literal.  */
9154      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
9155	type = v4sf_ftype_v4si_int;
9156
9157      /* vint, vfloat, 5 bit literal.  */
9158      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
9159	type = v4si_ftype_v4sf_int;
9160
9161      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9162	type = v2si_ftype_int_int;
9163
9164      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9165	type = v2si_ftype_v2si_char;
9166
9167      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9168	type = v2si_ftype_int_char;
9169
9170      else
9171	{
9172	  /* int, x, x.  */
9173	  gcc_assert (mode0 == SImode);
9174	  switch (mode1)
9175	    {
9176	    case V4SImode:
9177	      type = int_ftype_v4si_v4si;
9178	      break;
9179	    case V4SFmode:
9180	      type = int_ftype_v4sf_v4sf;
9181	      break;
9182	    case V16QImode:
9183	      type = int_ftype_v16qi_v16qi;
9184	      break;
9185	    case V8HImode:
9186	      type = int_ftype_v8hi_v8hi;
9187	      break;
9188	    default:
9189	      gcc_unreachable ();
9190	    }
9191	}
9192
9193      def_builtin (d->mask, d->name, type, d->code);
9194    }
9195
9196  /* Add the simple unary operators.  */
9197  d = (struct builtin_description *) bdesc_1arg;
9198  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9199    {
9200      enum machine_mode mode0, mode1;
9201      tree type;
9202      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9203			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9204
9205      if (is_overloaded)
9206        {
9207          mode0 = VOIDmode;
9208          mode1 = VOIDmode;
9209        }
9210      else
9211        {
9212          if (d->name == 0 || d->icode == CODE_FOR_nothing)
9213	    continue;
9214
9215          mode0 = insn_data[d->icode].operand[0].mode;
9216          mode1 = insn_data[d->icode].operand[1].mode;
9217        }
9218
9219      if (mode0 == V4SImode && mode1 == QImode)
9220	type = v4si_ftype_int;
9221      else if (mode0 == V8HImode && mode1 == QImode)
9222	type = v8hi_ftype_int;
9223      else if (mode0 == V16QImode && mode1 == QImode)
9224	type = v16qi_ftype_int;
9225      else if (mode0 == VOIDmode && mode1 == VOIDmode)
9226	type = opaque_ftype_opaque;
9227      else if (mode0 == V4SFmode && mode1 == V4SFmode)
9228	type = v4sf_ftype_v4sf;
9229      else if (mode0 == V8HImode && mode1 == V16QImode)
9230	type = v8hi_ftype_v16qi;
9231      else if (mode0 == V4SImode && mode1 == V8HImode)
9232	type = v4si_ftype_v8hi;
9233      else if (mode0 == V2SImode && mode1 == V2SImode)
9234	type = v2si_ftype_v2si;
9235      else if (mode0 == V2SFmode && mode1 == V2SFmode)
9236	type = v2sf_ftype_v2sf;
9237      else if (mode0 == V2SFmode && mode1 == V2SImode)
9238	type = v2sf_ftype_v2si;
9239      else if (mode0 == V2SImode && mode1 == V2SFmode)
9240	type = v2si_ftype_v2sf;
9241      else if (mode0 == V2SImode && mode1 == QImode)
9242	type = v2si_ftype_char;
9243      else
9244	gcc_unreachable ();
9245
9246      def_builtin (d->mask, d->name, type, d->code);
9247    }
9248}
9249
/* Register target-specific library routine names with the optabs
   machinery, selecting between the AIX/Darwin/Linux 128-bit IBM
   long-double helpers and the 32-bit SVR4 quad-float routines
   depending on the configured ABI and float support.  */

static void
rs6000_init_libfuncs (void)
{
  /* Old AIX targets without POWER2 or PowerPC instructions lack
     hardware float->int conversion, so use AIX library routines.  */
  if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
      && !TARGET_POWER2 && !TARGET_POWERPC)
    {
      /* AIX library routines for float->int conversion.  */
      set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
      set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
    }

  /* NOTE: the outer `if' below has no braces; the inner if/else is its
     whole body, and the final `else' at the bottom pairs with this
     !TARGET_IEEEQUAD test.  */
  if (!TARGET_IEEEQUAD)
      /* AIX/Darwin/64-bit Linux quad floating point routines.  */
    if (!TARGET_XL_COMPAT)
      {
	set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
	set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
	set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
	set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");

	if (TARGET_SOFT_FLOAT)
	  {
	    /* Soft float also needs negation, comparisons, and
	       conversions done in software.  */
	    set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
	    set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
	    set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
	    set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
	    set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
	    set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
	    set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
	    set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");

	    set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
	    set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
	    set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
	    set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
	    set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
	    set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
	    set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
	    set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
	  }
      }
    else
      {
	/* IBM XL compiler compatible names.  */
	set_optab_libfunc (add_optab, TFmode, "_xlqadd");
	set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
	set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
	set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
      }
  else
    {
      /* 32-bit SVR4 quad floating point routines.  */

      set_optab_libfunc (add_optab, TFmode, "_q_add");
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");

      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
      set_optab_libfunc (le_optab, TFmode, "_q_fle");

      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
      set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
    }
}
9329
9330
9331/* Expand a block clear operation, and return 1 if successful.  Return 0
9332   if we should let the compiler generate normal code.
9333
9334   operands[0] is the destination
9335   operands[1] is the length
9336   operands[3] is the alignment */
9337
9338int
9339expand_block_clear (rtx operands[])
9340{
9341  rtx orig_dest = operands[0];
9342  rtx bytes_rtx	= operands[1];
9343  rtx align_rtx = operands[3];
9344  bool constp	= (GET_CODE (bytes_rtx) == CONST_INT);
9345  HOST_WIDE_INT align;
9346  HOST_WIDE_INT bytes;
9347  int offset;
9348  int clear_bytes;
9349  int clear_step;
9350
9351  /* If this is not a fixed size move, just call memcpy */
9352  if (! constp)
9353    return 0;
9354
9355  /* This must be a fixed size alignment  */
9356  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9357  align = INTVAL (align_rtx) * BITS_PER_UNIT;
9358
9359  /* Anything to clear? */
9360  bytes = INTVAL (bytes_rtx);
9361  if (bytes <= 0)
9362    return 1;
9363
9364  /* Use the builtin memset after a point, to avoid huge code bloat.
9365     When optimize_size, avoid any significant code bloat; calling
9366     memset is about 4 instructions, so allow for one instruction to
9367     load zero and three to do clearing.  */
9368  if (TARGET_ALTIVEC && align >= 128)
9369    clear_step = 16;
9370  else if (TARGET_POWERPC64 && align >= 32)
9371    clear_step = 8;
9372  else
9373    clear_step = 4;
9374
9375  if (optimize_size && bytes > 3 * clear_step)
9376    return 0;
9377  if (! optimize_size && bytes > 8 * clear_step)
9378    return 0;
9379
9380  for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9381    {
9382      enum machine_mode mode = BLKmode;
9383      rtx dest;
9384
9385      if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9386	{
9387	  clear_bytes = 16;
9388	  mode = V4SImode;
9389	}
9390      else if (bytes >= 8 && TARGET_POWERPC64
9391	  /* 64-bit loads and stores require word-aligned
9392	     displacements.  */
9393	  && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9394	{
9395	  clear_bytes = 8;
9396	  mode = DImode;
9397	}
9398      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9399	{			/* move 4 bytes */
9400	  clear_bytes = 4;
9401	  mode = SImode;
9402	}
9403      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9404	{			/* move 2 bytes */
9405	  clear_bytes = 2;
9406	  mode = HImode;
9407	}
9408      else /* move 1 byte at a time */
9409	{
9410	  clear_bytes = 1;
9411	  mode = QImode;
9412	}
9413
9414      dest = adjust_address (orig_dest, mode, offset);
9415
9416      emit_move_insn (dest, CONST0_RTX (mode));
9417    }
9418
9419  return 1;
9420}
9421
9422
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

/* Number of register-to-register moves whose stores are queued before
   being flushed, so the scheduler can overlap the loads.  */
#define MAX_MOVE_REG 4

int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  /* Pending store insns; loads are emitted immediately, stores are
     batched (see MAX_MOVE_REG above).  */
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      /* Each strategy below fills in exactly one member: either a
	 string-move expander taking (dest, src, length, align), or a
	 simple per-mode move expander taking (dest, src).  Which one
	 is valid is indicated by MODE: BLKmode means movmemsi.  */
      union {
	rtx (*movmemsi) (rtx, rtx, rtx, rtx);
	rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* Altivec first, since it will be faster than a string move
	 when it applies, and usually not significantly larger.  */
      if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
	{
	  move_bytes = 16;
	  mode = V4SImode;
	  gen_func.mov = gen_movv4si;
	}
      /* The string-move patterns use fixed register ranges starting
	 at r5, so they are only usable when none of those registers
	 is fixed.  */
      else if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movmemsi = gen_movmemsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movmemsi = gen_movmemsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movmemsi = gen_movmemsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movmemsi = gen_movmemsi_2reg;
	}
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movmemsi = gen_movmemsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
	{
	  /* Emit the load now but queue the store, so consecutive
	     loads can be scheduled together.  */
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush the queued stores when the buffer is full, before a
	 string move, or at the end of the block.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movmemsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  emit_insn ((*gen_func.movmemsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
9602
9603
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no lswi; emit a plain load.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for a destination register that the address expression
     refers to; loading into it mid-sequence would clobber the base
     address, so such a register must be loaded last.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The overlapping register is the last one: lswi the
	       first words-1 registers, then load the final word into
	       the base register itself.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The overlapping register is the first one: advance the
	       base past it, lswi the rest, then load the first word
	       back at the negative offset.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The overlap is in the middle: fall back to individual
	       word loads, doing the overlapping register last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a single lswi does the whole vector.  */
  return "{lsi|lswi} %2,%1,%N0";
}
9660
9661
9662/* A validation routine: say whether CODE, a condition code, and MODE
9663   match.  The other alternatives either don't make sense or should
9664   never be generated.  */
9665
9666void
9667validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9668{
9669  gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9670	       || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9671	      && GET_MODE_CLASS (mode) == MODE_CC);
9672
9673  /* These don't make sense.  */
9674  gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9675	      || mode != CCUNSmode);
9676
9677  gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9678	      || mode == CCUNSmode);
9679
9680  gcc_assert (mode == CCFPmode
9681	      || (code != ORDERED && code != UNORDERED
9682		  && code != UNEQ && code != LTGT
9683		  && code != UNGT && code != UNLT
9684		  && code != UNGE && code != UNLE));
9685
9686  /* These should never be generated except for
9687     flag_finite_math_only.  */
9688  gcc_assert (mode != CCFPmode
9689	      || flag_finite_math_only
9690	      || (code != LE && code != GE
9691		  && code != UNEQ && code != LTGT
9692		  && code != UNGT && code != UNLT));
9693
9694  /* These are invalid; the information is not there.  */
9695  gcc_assert (mode != CCEQmode || code == EQ || code == NE);
9696}
9697
9698
9699/* Return 1 if ANDOP is a mask that has no bits on that are not in the
9700   mask required to convert the result of a rotate insn into a shift
9701   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
9702
9703int
9704includes_lshift_p (rtx shiftop, rtx andop)
9705{
9706  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9707
9708  shift_mask <<= INTVAL (shiftop);
9709
9710  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9711}
9712
9713/* Similar, but for right shift.  */
9714
9715int
9716includes_rshift_p (rtx shiftop, rtx andop)
9717{
9718  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9719
9720  shift_mask >>= INTVAL (shiftop);
9721
9722  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9723}
9724
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks never match the required shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test, but the 64-bit mask is split across two host
	 words when HOST_WIDE_INT is narrower than 64 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      /* HIGH is only read below when HOST_BITS_PER_WIDE_INT < 64,
	 which is the only case where it is set here.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one 64-bit masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones lies entirely in the high word; apply
	     the CONST_INT logic to HIGH with the shift reduced by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* The ones begin in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both words and strip the low run of (original) zeros,
	 then verify what remains is a single low group of ones.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
9817
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test for a 64-bit mask split across two host words when
	 HOST_WIDE_INT is narrower than 64 bits.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask's ones lie entirely in the high word; apply
		 the CONST_INT logic there with the shift reduced by
		 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Ones start in the low word; the high word must then be
	     all ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
9887
9888/* Return 1 if operands will generate a valid arguments to rlwimi
9889instruction for insert with right shift in 64-bit mode.  The mask may
9890not start on the first bit or stop on the last bit because wrap-around
9891effects of instruction do not correspond to semantics of RTL insn.  */
9892
9893int
9894insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9895{
9896  if (INTVAL (startop) > 32
9897      && INTVAL (startop) < 64
9898      && INTVAL (sizeop) > 1
9899      && INTVAL (sizeop) + INTVAL (startop) < 64
9900      && INTVAL (shiftop) > 0
9901      && INTVAL (sizeop) + INTVAL (shiftop) < 32
9902      && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9903    return 1;
9904
9905  return 0;
9906}
9907
9908/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9909   for lfq and stfq insns iff the registers are hard registers.   */
9910
9911int
9912registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9913{
9914  /* We might have been passed a SUBREG.  */
9915  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9916    return 0;
9917
9918  /* We might have been passed non floating point registers.  */
9919  if (!FP_REGNO_P (REGNO (reg1))
9920      || !FP_REGNO_P (REGNO (reg2)))
9921    return 0;
9922
9923  return (REGNO (reg1) == REGNO (reg2) - 1);
9924}
9925
9926/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9927   addr1 and addr2 must be in consecutive memory locations
9928   (addr2 == addr1 + 8).  */
9929
9930int
9931mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9932{
9933  rtx addr1, addr2;
9934  unsigned int reg1, reg2;
9935  int offset1, offset2;
9936
9937  /* The mems cannot be volatile.  */
9938  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9939    return 0;
9940
9941  addr1 = XEXP (mem1, 0);
9942  addr2 = XEXP (mem2, 0);
9943
9944  /* Extract an offset (if used) from the first addr.  */
9945  if (GET_CODE (addr1) == PLUS)
9946    {
9947      /* If not a REG, return zero.  */
9948      if (GET_CODE (XEXP (addr1, 0)) != REG)
9949	return 0;
9950      else
9951	{
9952	  reg1 = REGNO (XEXP (addr1, 0));
9953	  /* The offset must be constant!  */
9954	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9955	    return 0;
9956	  offset1 = INTVAL (XEXP (addr1, 1));
9957	}
9958    }
9959  else if (GET_CODE (addr1) != REG)
9960    return 0;
9961  else
9962    {
9963      reg1 = REGNO (addr1);
9964      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9965      offset1 = 0;
9966    }
9967
9968  /* And now for the second addr.  */
9969  if (GET_CODE (addr2) == PLUS)
9970    {
9971      /* If not a REG, return zero.  */
9972      if (GET_CODE (XEXP (addr2, 0)) != REG)
9973	return 0;
9974      else
9975	{
9976	  reg2 = REGNO (XEXP (addr2, 0));
9977	  /* The offset must be constant. */
9978	  if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9979	    return 0;
9980	  offset2 = INTVAL (XEXP (addr2, 1));
9981	}
9982    }
9983  else if (GET_CODE (addr2) != REG)
9984    return 0;
9985  else
9986    {
9987      reg2 = REGNO (addr2);
9988      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9989      offset2 = 0;
9990    }
9991
9992  /* Both of these must have the same base register.  */
9993  if (reg1 != reg2)
9994    return 0;
9995
9996  /* The offset for the second addr must be 8 more than the first addr.  */
9997  if (offset2 != offset1 + 8)
9998    return 0;
9999
10000  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
10001     instructions.  */
10002  return 1;
10003}
10004
10005/* Return the register class of a scratch register needed to copy IN into
10006   or out of a register in CLASS in MODE.  If it can be done directly,
10007   NO_REGS is returned.  */
10008
10009enum reg_class
10010rs6000_secondary_reload_class (enum reg_class class,
10011			       enum machine_mode mode ATTRIBUTE_UNUSED,
10012			       rtx in)
10013{
10014  int regno;
10015
10016  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10017#if TARGET_MACHO
10018		     && MACHOPIC_INDIRECT
10019#endif
10020		     ))
10021    {
10022      /* We cannot copy a symbolic operand directly into anything
10023	 other than BASE_REGS for TARGET_ELF.  So indicate that a
10024	 register from BASE_REGS is needed as an intermediate
10025	 register.
10026
10027	 On Darwin, pic addresses require a load from memory, which
10028	 needs a base register.  */
10029      if (class != BASE_REGS
10030	  && (GET_CODE (in) == SYMBOL_REF
10031	      || GET_CODE (in) == HIGH
10032	      || GET_CODE (in) == LABEL_REF
10033	      || GET_CODE (in) == CONST))
10034	return BASE_REGS;
10035    }
10036
10037  if (GET_CODE (in) == REG)
10038    {
10039      regno = REGNO (in);
10040      if (regno >= FIRST_PSEUDO_REGISTER)
10041	{
10042	  regno = true_regnum (in);
10043	  if (regno >= FIRST_PSEUDO_REGISTER)
10044	    regno = -1;
10045	}
10046    }
10047  else if (GET_CODE (in) == SUBREG)
10048    {
10049      regno = true_regnum (in);
10050      if (regno >= FIRST_PSEUDO_REGISTER)
10051	regno = -1;
10052    }
10053  else
10054    regno = -1;
10055
10056  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10057     into anything.  */
10058  if (class == GENERAL_REGS || class == BASE_REGS
10059      || (regno >= 0 && INT_REGNO_P (regno)))
10060    return NO_REGS;
10061
10062  /* Constants, memory, and FP registers can go into FP registers.  */
10063  if ((regno == -1 || FP_REGNO_P (regno))
10064      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10065    return NO_REGS;
10066
10067  /* Memory, and AltiVec registers can go into AltiVec registers.  */
10068  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10069      && class == ALTIVEC_REGS)
10070    return NO_REGS;
10071
10072  /* We can copy among the CR registers.  */
10073  if ((class == CR_REGS || class == CR0_REGS)
10074      && regno >= 0 && CR_REGNO_P (regno))
10075    return NO_REGS;
10076
10077  /* Otherwise, we need GENERAL_REGS.  */
10078  return GENERAL_REGS;
10079}
10080
10081/* Given a comparison operation, return the bit number in CCR to test.  We
10082   know this is a valid comparison.
10083
10084   SCC_P is 1 if this is for an scc.  That means that %D will have been
10085   used instead of %C, so the bits will be in different places.
10086
10087   Return -1 if OP isn't a valid comparison for some reason.  */
10088
10089int
10090ccr_bit (rtx op, int scc_p)
10091{
10092  enum rtx_code code = GET_CODE (op);
10093  enum machine_mode cc_mode;
10094  int cc_regnum;
10095  int base_bit;
10096  rtx reg;
10097
10098  if (!COMPARISON_P (op))
10099    return -1;
10100
10101  reg = XEXP (op, 0);
10102
10103  gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
10104
10105  cc_mode = GET_MODE (reg);
10106  cc_regnum = REGNO (reg);
10107  base_bit = 4 * (cc_regnum - CR0_REGNO);
10108
10109  validate_condition_mode (code, cc_mode);
10110
10111  /* When generating a sCOND operation, only positive conditions are
10112     allowed.  */
10113  gcc_assert (!scc_p
10114	      || code == EQ || code == GT || code == LT || code == UNORDERED
10115	      || code == GTU || code == LTU);
10116
10117  switch (code)
10118    {
10119    case NE:
10120      return scc_p ? base_bit + 3 : base_bit + 2;
10121    case EQ:
10122      return base_bit + 2;
10123    case GT:  case GTU:  case UNLE:
10124      return base_bit + 1;
10125    case LT:  case LTU:  case UNGE:
10126      return base_bit;
10127    case ORDERED:  case UNORDERED:
10128      return base_bit + 3;
10129
10130    case GE:  case GEU:
10131      /* If scc, we will have done a cror to put the bit in the
10132	 unordered position.  So test that bit.  For integer, this is ! LT
10133	 unless this is an scc insn.  */
10134      return scc_p ? base_bit + 3 : base_bit;
10135
10136    case LE:  case LEU:
10137      return scc_p ? base_bit + 3 : base_bit + 1;
10138
10139    default:
10140      gcc_unreachable ();
10141    }
10142}
10143
10144/* Return the GOT register.  */
10145
10146rtx
10147rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
10148{
10149  /* The second flow pass currently (June 1999) can't update
10150     regs_ever_live without disturbing other parts of the compiler, so
10151     update it here to make the prolog/epilogue code happy.  */
10152  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
10153    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
10154
10155  current_function_uses_pic_offset_table = 1;
10156
10157  return pic_offset_table_rtx;
10158}
10159
10160/* Function to init struct machine_function.
10161   This will be called, via a pointer variable,
10162   from push_function_context.  */
10163
10164static struct machine_function *
10165rs6000_init_machine_status (void)
10166{
10167  return ggc_alloc_cleared (sizeof (machine_function));
10168}
10169
/* These macros test for integers and extract the low-order bits.  */

/* True iff X is an integer constant: either a CONST_INT, or a
   CONST_DOUBLE carrying an integer (VOIDmode) value.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

/* The low-order HOST_WIDE_INT of integer constant X; X must satisfy
   INT_P.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10177
10178int
10179extract_MB (rtx op)
10180{
10181  int i;
10182  unsigned long val = INT_LOWPART (op);
10183
10184  /* If the high bit is zero, the value is the first 1 bit we find
10185     from the left.  */
10186  if ((val & 0x80000000) == 0)
10187    {
10188      gcc_assert (val & 0xffffffff);
10189
10190      i = 1;
10191      while (((val <<= 1) & 0x80000000) == 0)
10192	++i;
10193      return i;
10194    }
10195
10196  /* If the high bit is set and the low bit is not, or the mask is all
10197     1's, the value is zero.  */
10198  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10199    return 0;
10200
10201  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
10202     from the right.  */
10203  i = 31;
10204  while (((val >>= 1) & 1) != 0)
10205    --i;
10206
10207  return i;
10208}
10209
10210int
10211extract_ME (rtx op)
10212{
10213  int i;
10214  unsigned long val = INT_LOWPART (op);
10215
10216  /* If the low bit is zero, the value is the first 1 bit we find from
10217     the right.  */
10218  if ((val & 1) == 0)
10219    {
10220      gcc_assert (val & 0xffffffff);
10221
10222      i = 30;
10223      while (((val >>= 1) & 1) == 0)
10224	--i;
10225
10226      return i;
10227    }
10228
10229  /* If the low bit is set and the high bit is not, or the mask is all
10230     1's, the value is 31.  */
10231  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10232    return 31;
10233
10234  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
10235     from the left.  */
10236  i = 0;
10237  while (((val <<= 1) & 0x80000000) != 0)
10238    ++i;
10239
10240  return i;
10241}
10242
10243/* Locate some local-dynamic symbol still in use by this function
10244   so that we can print its name in some tls_ld pattern.  */
10245
10246static const char *
10247rs6000_get_some_local_dynamic_name (void)
10248{
10249  rtx insn;
10250
10251  if (cfun->machine->some_ld_name)
10252    return cfun->machine->some_ld_name;
10253
10254  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10255    if (INSN_P (insn)
10256	&& for_each_rtx (&PATTERN (insn),
10257			 rs6000_get_some_local_dynamic_name_1, 0))
10258      return cfun->machine->some_ld_name;
10259
10260  gcc_unreachable ();
10261}
10262
10263/* Helper function for rs6000_get_some_local_dynamic_name.  */
10264
10265static int
10266rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10267{
10268  rtx x = *px;
10269
10270  if (GET_CODE (x) == SYMBOL_REF)
10271    {
10272      const char *str = XSTR (x, 0);
10273      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10274	{
10275	  cfun->machine->some_ld_name = str;
10276	  return 1;
10277	}
10278    }
10279
10280  return 0;
10281}
10282
10283/* Write out a function code label.  */
10284
10285void
10286rs6000_output_function_entry (FILE *file, const char *fname)
10287{
10288  if (fname[0] != '.')
10289    {
10290      switch (DEFAULT_ABI)
10291	{
10292	default:
10293	  gcc_unreachable ();
10294
10295	case ABI_AIX:
10296	  if (DOT_SYMBOLS)
10297	    putc ('.', file);
10298	  else
10299	    ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10300	  break;
10301
10302	case ABI_V4:
10303	case ABI_DARWIN:
10304	  break;
10305	}
10306    }
10307  if (TARGET_AIX)
10308    RS6000_OUTPUT_BASENAME (file, fname);
10309  else
10310    assemble_name (file, fname);
10311}
10312
10313/* Print an operand.  Recognize special options, documented below.  */
10314
10315#if TARGET_ELF
10316#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10317#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10318#else
10319#define SMALL_DATA_RELOC "sda21"
10320#define SMALL_DATA_REG 0
10321#endif
10322
10323void
10324print_operand (FILE *file, rtx x, int code)
10325{
10326  int i;
10327  HOST_WIDE_INT val;
10328  unsigned HOST_WIDE_INT uval;
10329
10330  switch (code)
10331    {
10332    case '.':
10333      /* Write out an instruction after the call which may be replaced
10334	 with glue code by the loader.  This depends on the AIX version.  */
10335      asm_fprintf (file, RS6000_CALL_GLUE);
10336      return;
10337
10338      /* %a is output_address.  */
10339
10340    case 'A':
10341      /* If X is a constant integer whose low-order 5 bits are zero,
10342	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
10343	 in the AIX assembler where "sri" with a zero shift count
10344	 writes a trash instruction.  */
10345      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10346	putc ('l', file);
10347      else
10348	putc ('r', file);
10349      return;
10350
10351    case 'b':
10352      /* If constant, low-order 16 bits of constant, unsigned.
10353	 Otherwise, write normally.  */
10354      if (INT_P (x))
10355	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10356      else
10357	print_operand (file, x, 0);
10358      return;
10359
10360    case 'B':
10361      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10362	 for 64-bit mask direction.  */
10363      putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10364      return;
10365
10366      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10367	 output_operand.  */
10368
10369    case 'c':
10370      /* X is a CR register.  Print the number of the GT bit of the CR.  */
10371      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10372	output_operand_lossage ("invalid %%E value");
10373      else
10374	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10375      return;
10376
10377    case 'D':
10378      /* Like 'J' but get to the GT bit only.  */
10379      gcc_assert (GET_CODE (x) == REG);
10380
10381      /* Bit 1 is GT bit.  */
10382      i = 4 * (REGNO (x) - CR0_REGNO) + 1;
10383
10384      /* Add one for shift count in rlinm for scc.  */
10385      fprintf (file, "%d", i + 1);
10386      return;
10387
10388    case 'E':
10389      /* X is a CR register.  Print the number of the EQ bit of the CR */
10390      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10391	output_operand_lossage ("invalid %%E value");
10392      else
10393	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10394      return;
10395
10396    case 'f':
10397      /* X is a CR register.  Print the shift count needed to move it
10398	 to the high-order four bits.  */
10399      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10400	output_operand_lossage ("invalid %%f value");
10401      else
10402	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10403      return;
10404
10405    case 'F':
10406      /* Similar, but print the count for the rotate in the opposite
10407	 direction.  */
10408      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10409	output_operand_lossage ("invalid %%F value");
10410      else
10411	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10412      return;
10413
10414    case 'G':
10415      /* X is a constant integer.  If it is negative, print "m",
10416	 otherwise print "z".  This is to make an aze or ame insn.  */
10417      if (GET_CODE (x) != CONST_INT)
10418	output_operand_lossage ("invalid %%G value");
10419      else if (INTVAL (x) >= 0)
10420	putc ('z', file);
10421      else
10422	putc ('m', file);
10423      return;
10424
10425    case 'h':
10426      /* If constant, output low-order five bits.  Otherwise, write
10427	 normally.  */
10428      if (INT_P (x))
10429	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10430      else
10431	print_operand (file, x, 0);
10432      return;
10433
10434    case 'H':
10435      /* If constant, output low-order six bits.  Otherwise, write
10436	 normally.  */
10437      if (INT_P (x))
10438	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10439      else
10440	print_operand (file, x, 0);
10441      return;
10442
10443    case 'I':
10444      /* Print `i' if this is a constant, else nothing.  */
10445      if (INT_P (x))
10446	putc ('i', file);
10447      return;
10448
10449    case 'j':
10450      /* Write the bit number in CCR for jump.  */
10451      i = ccr_bit (x, 0);
10452      if (i == -1)
10453	output_operand_lossage ("invalid %%j code");
10454      else
10455	fprintf (file, "%d", i);
10456      return;
10457
10458    case 'J':
10459      /* Similar, but add one for shift count in rlinm for scc and pass
10460	 scc flag to `ccr_bit'.  */
10461      i = ccr_bit (x, 1);
10462      if (i == -1)
10463	output_operand_lossage ("invalid %%J code");
10464      else
10465	/* If we want bit 31, write a shift count of zero, not 32.  */
10466	fprintf (file, "%d", i == 31 ? 0 : i + 1);
10467      return;
10468
10469    case 'k':
10470      /* X must be a constant.  Write the 1's complement of the
10471	 constant.  */
10472      if (! INT_P (x))
10473	output_operand_lossage ("invalid %%k value");
10474      else
10475	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10476      return;
10477
10478    case 'K':
10479      /* X must be a symbolic constant on ELF.  Write an
10480	 expression suitable for an 'addi' that adds in the low 16
10481	 bits of the MEM.  */
10482      if (GET_CODE (x) != CONST)
10483	{
10484	  print_operand_address (file, x);
10485	  fputs ("@l", file);
10486	}
10487      else
10488	{
10489	  if (GET_CODE (XEXP (x, 0)) != PLUS
10490	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10491		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10492	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10493	    output_operand_lossage ("invalid %%K value");
10494	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
10495	  fputs ("@l", file);
10496	  /* For GNU as, there must be a non-alphanumeric character
10497	     between 'l' and the number.  The '-' is added by
10498	     print_operand() already.  */
10499	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10500	    fputs ("+", file);
10501	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10502	}
10503      return;
10504
10505      /* %l is output_asm_label.  */
10506
10507    case 'L':
10508      /* Write second word of DImode or DFmode reference.  Works on register
10509	 or non-indexed memory only.  */
10510      if (GET_CODE (x) == REG)
10511	fputs (reg_names[REGNO (x) + 1], file);
10512      else if (GET_CODE (x) == MEM)
10513	{
10514	  /* Handle possible auto-increment.  Since it is pre-increment and
10515	     we have already done it, we can just use an offset of word.  */
10516	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10517	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10518	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10519					   UNITS_PER_WORD));
10520	  else
10521	    output_address (XEXP (adjust_address_nv (x, SImode,
10522						     UNITS_PER_WORD),
10523				  0));
10524
10525	  if (small_data_operand (x, GET_MODE (x)))
10526	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10527		     reg_names[SMALL_DATA_REG]);
10528	}
10529      return;
10530
10531    case 'm':
10532      /* MB value for a mask operand.  */
10533      if (! mask_operand (x, SImode))
10534	output_operand_lossage ("invalid %%m value");
10535
10536      fprintf (file, "%d", extract_MB (x));
10537      return;
10538
10539    case 'M':
10540      /* ME value for a mask operand.  */
10541      if (! mask_operand (x, SImode))
10542	output_operand_lossage ("invalid %%M value");
10543
10544      fprintf (file, "%d", extract_ME (x));
10545      return;
10546
10547      /* %n outputs the negative of its operand.  */
10548
10549    case 'N':
10550      /* Write the number of elements in the vector times 4.  */
10551      if (GET_CODE (x) != PARALLEL)
10552	output_operand_lossage ("invalid %%N value");
10553      else
10554	fprintf (file, "%d", XVECLEN (x, 0) * 4);
10555      return;
10556
10557    case 'O':
10558      /* Similar, but subtract 1 first.  */
10559      if (GET_CODE (x) != PARALLEL)
10560	output_operand_lossage ("invalid %%O value");
10561      else
10562	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10563      return;
10564
10565    case 'p':
10566      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
10567      if (! INT_P (x)
10568	  || INT_LOWPART (x) < 0
10569	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
10570	output_operand_lossage ("invalid %%p value");
10571      else
10572	fprintf (file, "%d", i);
10573      return;
10574
10575    case 'P':
10576      /* The operand must be an indirect memory reference.  The result
10577	 is the register name.  */
10578      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10579	  || REGNO (XEXP (x, 0)) >= 32)
10580	output_operand_lossage ("invalid %%P value");
10581      else
10582	fputs (reg_names[REGNO (XEXP (x, 0))], file);
10583      return;
10584
10585    case 'q':
10586      /* This outputs the logical code corresponding to a boolean
10587	 expression.  The expression may have one or both operands
10588	 negated (if one, only the first one).  For condition register
10589	 logical operations, it will also treat the negated
10590	 CR codes as NOTs, but not handle NOTs of them.  */
10591      {
10592	const char *const *t = 0;
10593	const char *s;
10594	enum rtx_code code = GET_CODE (x);
10595	static const char * const tbl[3][3] = {
10596	  { "and", "andc", "nor" },
10597	  { "or", "orc", "nand" },
10598	  { "xor", "eqv", "xor" } };
10599
10600	if (code == AND)
10601	  t = tbl[0];
10602	else if (code == IOR)
10603	  t = tbl[1];
10604	else if (code == XOR)
10605	  t = tbl[2];
10606	else
10607	  output_operand_lossage ("invalid %%q value");
10608
10609	if (GET_CODE (XEXP (x, 0)) != NOT)
10610	  s = t[0];
10611	else
10612	  {
10613	    if (GET_CODE (XEXP (x, 1)) == NOT)
10614	      s = t[2];
10615	    else
10616	      s = t[1];
10617	  }
10618
10619	fputs (s, file);
10620      }
10621      return;
10622
10623    case 'Q':
10624      if (TARGET_MFCRF)
10625	fputc (',', file);
10626        /* FALLTHRU */
10627      else
10628	return;
10629
10630    case 'R':
10631      /* X is a CR register.  Print the mask for `mtcrf'.  */
10632      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10633	output_operand_lossage ("invalid %%R value");
10634      else
10635	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10636      return;
10637
10638    case 's':
10639      /* Low 5 bits of 32 - value */
10640      if (! INT_P (x))
10641	output_operand_lossage ("invalid %%s value");
10642      else
10643	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10644      return;
10645
10646    case 'S':
10647      /* PowerPC64 mask position.  All 0's is excluded.
10648	 CONST_INT 32-bit mask is considered sign-extended so any
10649	 transition must occur within the CONST_INT, not on the boundary.  */
10650      if (! mask64_operand (x, DImode))
10651	output_operand_lossage ("invalid %%S value");
10652
10653      uval = INT_LOWPART (x);
10654
10655      if (uval & 1)	/* Clear Left */
10656	{
10657#if HOST_BITS_PER_WIDE_INT > 64
10658	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10659#endif
10660	  i = 64;
10661	}
10662      else		/* Clear Right */
10663	{
10664	  uval = ~uval;
10665#if HOST_BITS_PER_WIDE_INT > 64
10666	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10667#endif
10668	  i = 63;
10669	}
10670      while (uval != 0)
10671	--i, uval >>= 1;
10672      gcc_assert (i >= 0);
10673      fprintf (file, "%d", i);
10674      return;
10675
10676    case 't':
10677      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
10678      gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10679
10680      /* Bit 3 is OV bit.  */
10681      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10682
10683      /* If we want bit 31, write a shift count of zero, not 32.  */
10684      fprintf (file, "%d", i == 31 ? 0 : i + 1);
10685      return;
10686
10687    case 'T':
10688      /* Print the symbolic name of a branch target register.  */
10689      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10690				  && REGNO (x) != COUNT_REGISTER_REGNUM))
10691	output_operand_lossage ("invalid %%T value");
10692      else if (REGNO (x) == LINK_REGISTER_REGNUM)
10693	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10694      else
10695	fputs ("ctr", file);
10696      return;
10697
10698    case 'u':
10699      /* High-order 16 bits of constant for use in unsigned operand.  */
10700      if (! INT_P (x))
10701	output_operand_lossage ("invalid %%u value");
10702      else
10703	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10704		 (INT_LOWPART (x) >> 16) & 0xffff);
10705      return;
10706
10707    case 'v':
10708      /* High-order 16 bits of constant for use in signed operand.  */
10709      if (! INT_P (x))
10710	output_operand_lossage ("invalid %%v value");
10711      else
10712	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10713		 (INT_LOWPART (x) >> 16) & 0xffff);
10714      return;
10715
10716    case 'U':
10717      /* Print `u' if this has an auto-increment or auto-decrement.  */
10718      if (GET_CODE (x) == MEM
10719	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
10720	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10721	putc ('u', file);
10722      return;
10723
10724    case 'V':
10725      /* Print the trap code for this operand.  */
10726      switch (GET_CODE (x))
10727	{
10728	case EQ:
10729	  fputs ("eq", file);   /* 4 */
10730	  break;
10731	case NE:
10732	  fputs ("ne", file);   /* 24 */
10733	  break;
10734	case LT:
10735	  fputs ("lt", file);   /* 16 */
10736	  break;
10737	case LE:
10738	  fputs ("le", file);   /* 20 */
10739	  break;
10740	case GT:
10741	  fputs ("gt", file);   /* 8 */
10742	  break;
10743	case GE:
10744	  fputs ("ge", file);   /* 12 */
10745	  break;
10746	case LTU:
10747	  fputs ("llt", file);  /* 2 */
10748	  break;
10749	case LEU:
10750	  fputs ("lle", file);  /* 6 */
10751	  break;
10752	case GTU:
10753	  fputs ("lgt", file);  /* 1 */
10754	  break;
10755	case GEU:
10756	  fputs ("lge", file);  /* 5 */
10757	  break;
10758	default:
10759	  gcc_unreachable ();
10760	}
10761      break;
10762
10763    case 'w':
10764      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
10765	 normally.  */
10766      if (INT_P (x))
10767	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10768		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10769      else
10770	print_operand (file, x, 0);
10771      return;
10772
10773    case 'W':
10774      /* MB value for a PowerPC64 rldic operand.  */
10775      val = (GET_CODE (x) == CONST_INT
10776	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10777
10778      if (val < 0)
10779	i = -1;
10780      else
10781	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10782	  if ((val <<= 1) < 0)
10783	    break;
10784
10785#if HOST_BITS_PER_WIDE_INT == 32
10786      if (GET_CODE (x) == CONST_INT && i >= 0)
10787	i += 32;  /* zero-extend high-part was all 0's */
10788      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10789	{
10790	  val = CONST_DOUBLE_LOW (x);
10791
10792	  gcc_assert (val);
10793	  if (val < 0)
10794	    --i;
10795	  else
10796	    for ( ; i < 64; i++)
10797	      if ((val <<= 1) < 0)
10798		break;
10799	}
10800#endif
10801
10802      fprintf (file, "%d", i + 1);
10803      return;
10804
10805    case 'X':
10806      if (GET_CODE (x) == MEM
10807	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
10808	putc ('x', file);
10809      return;
10810
10811    case 'Y':
10812      /* Like 'L', for third word of TImode  */
10813      if (GET_CODE (x) == REG)
10814	fputs (reg_names[REGNO (x) + 2], file);
10815      else if (GET_CODE (x) == MEM)
10816	{
10817	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10818	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10819	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10820	  else
10821	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10822	  if (small_data_operand (x, GET_MODE (x)))
10823	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10824		     reg_names[SMALL_DATA_REG]);
10825	}
10826      return;
10827
10828    case 'z':
10829      /* X is a SYMBOL_REF.  Write out the name preceded by a
10830	 period and without any trailing data in brackets.  Used for function
10831	 names.  If we are configured for System V (or the embedded ABI) on
10832	 the PowerPC, do not emit the period, since those systems do not use
10833	 TOCs and the like.  */
10834      gcc_assert (GET_CODE (x) == SYMBOL_REF);
10835
10836      /* Mark the decl as referenced so that cgraph will output the
10837	 function.  */
10838      if (SYMBOL_REF_DECL (x))
10839	mark_decl_referenced (SYMBOL_REF_DECL (x));
10840
10841      /* For macho, check to see if we need a stub.  */
10842      if (TARGET_MACHO)
10843	{
10844	  const char *name = XSTR (x, 0);
10845#if TARGET_MACHO
10846	  if (MACHOPIC_INDIRECT
10847	      && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10848	    name = machopic_indirection_name (x, /*stub_p=*/true);
10849#endif
10850	  assemble_name (file, name);
10851	}
10852      else if (!DOT_SYMBOLS)
10853	assemble_name (file, XSTR (x, 0));
10854      else
10855	rs6000_output_function_entry (file, XSTR (x, 0));
10856      return;
10857
10858    case 'Z':
10859      /* Like 'L', for last word of TImode.  */
10860      if (GET_CODE (x) == REG)
10861	fputs (reg_names[REGNO (x) + 3], file);
10862      else if (GET_CODE (x) == MEM)
10863	{
10864	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10865	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10866	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10867	  else
10868	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10869	  if (small_data_operand (x, GET_MODE (x)))
10870	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10871		     reg_names[SMALL_DATA_REG]);
10872	}
10873      return;
10874
10875      /* Print AltiVec or SPE memory operand.  */
10876    case 'y':
10877      {
10878	rtx tmp;
10879
10880	gcc_assert (GET_CODE (x) == MEM);
10881
10882	tmp = XEXP (x, 0);
10883
10884	/* Ugly hack because %y is overloaded.  */
10885	if (TARGET_E500 && GET_MODE_SIZE (GET_MODE (x)) == 8)
10886	  {
10887	    /* Handle [reg].  */
10888	    if (GET_CODE (tmp) == REG)
10889	      {
10890		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10891		break;
10892	      }
10893	    /* Handle [reg+UIMM].  */
10894	    else if (GET_CODE (tmp) == PLUS &&
10895		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10896	      {
10897		int x;
10898
10899		gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10900
10901		x = INTVAL (XEXP (tmp, 1));
10902		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10903		break;
10904	      }
10905
10906	    /* Fall through.  Must be [reg+reg].  */
10907	  }
10908	if (TARGET_ALTIVEC
10909	    && GET_CODE (tmp) == AND
10910	    && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10911	    && INTVAL (XEXP (tmp, 1)) == -16)
10912	  tmp = XEXP (tmp, 0);
10913	if (GET_CODE (tmp) == REG)
10914	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10915	else
10916	  {
10917	    gcc_assert (GET_CODE (tmp) == PLUS
10918			&& REG_P (XEXP (tmp, 0))
10919			&& REG_P (XEXP (tmp, 1)));
10920
10921	    if (REGNO (XEXP (tmp, 0)) == 0)
10922	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10923		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
10924	    else
10925	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10926		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
10927	  }
10928	break;
10929      }
10930
10931    case 0:
10932      if (GET_CODE (x) == REG)
10933	fprintf (file, "%s", reg_names[REGNO (x)]);
10934      else if (GET_CODE (x) == MEM)
10935	{
10936	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
10937	     know the width from the mode.  */
10938	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10939	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10940		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10941	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10942	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10943		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10944	  else
10945	    output_address (XEXP (x, 0));
10946	}
10947      else
10948	output_addr_const (file, x);
10949      return;
10950
10951    case '&':
10952      assemble_name (file, rs6000_get_some_local_dynamic_name ());
10953      return;
10954
10955    default:
10956      output_operand_lossage ("invalid %%xn code");
10957    }
10958}
10959
10960/* Print the address of an operand.  */
10961
void
print_operand_address (FILE *file, rtx x)
{
  /* Emit the assembler syntax for memory address X to FILE.  Handles
     plain registers, symbolic addresses (with small-data relocation),
     reg+reg, reg+const, ELF/Darwin LO_SUM forms, and TOC-relative
     constant-pool addresses.  Any other form aborts.  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data objects are addressed off the small data base
	   register with a relocation suffix.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else
	/* A bare symbolic address is only valid when not using TOC
	   addressing.  */
	gcc_assert (!TARGET_TOC);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      gcc_assert (REG_P (XEXP (x, 0)));
      /* In indexed forms a first operand of r0 means "no base register",
	 so print the operands swapped to keep r0 out of the base slot.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part form: "sym@l(reg)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin low-part form: "lo16(sym)(reg)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF, print the symbol with an "@toc" suffix by
		 temporarily replacing its name; restored below.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary surgery on the shared RTL.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    gcc_unreachable ();
}
11046
11047/* Target hook for assembling integer objects.  The PowerPC version has
11048   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11049   is defined.  It also needs to handle DI-mode objects on 64-bit
11050   targets.  */
11051
static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  /* Returns true if this function emitted the value itself; otherwise
     falls back to default_assemble_integer.  */
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      /* Guards against re-entering this path when output_addr_const
	 below ends up back here.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && in_section != toc_section
	  && in_section != text_section
	  && !unlikely_text_section_p (in_section)
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label before the word, output the word with an
	     @fixup relocation, then record the label in .fixup so the
	     startup code can relocate it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
11107
11108#ifdef HAVE_GAS_HIDDEN
11109/* Emit an assembler directive to set symbol visibility for DECL to
11110   VISIBILITY_TYPE.  */
11111
11112static void
11113rs6000_assemble_visibility (tree decl, int vis)
11114{
11115  /* Functions need to have their entry point symbol visibility set as
11116     well as their descriptor symbol visibility.  */
11117  if (DEFAULT_ABI == ABI_AIX
11118      && DOT_SYMBOLS
11119      && TREE_CODE (decl) == FUNCTION_DECL)
11120    {
11121      static const char * const visibility_types[] = {
11122	NULL, "internal", "hidden", "protected"
11123      };
11124
11125      const char *name, *type;
11126
11127      name = ((* targetm.strip_name_encoding)
11128	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
11129      type = visibility_types[vis];
11130
11131      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11132      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
11133    }
11134  else
11135    default_assemble_visibility (decl, vis);
11136}
11137#endif
11138
11139enum rtx_code
11140rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
11141{
11142  /* Reversal of FP compares takes care -- an ordered compare
11143     becomes an unordered compare and vice versa.  */
11144  if (mode == CCFPmode
11145      && (!flag_finite_math_only
11146	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11147	  || code == UNEQ || code == LTGT))
11148    return reverse_condition_maybe_unordered (code);
11149  else
11150    return reverse_condition (code);
11151}
11152
11153/* Generate a compare for CODE.  Return a brand-new rtx that
11154   represents the result of the compare.  */
11155
static rtx
rs6000_generate_compare (enum rtx_code code)
{
  /* Compares rs6000_compare_op0 against rs6000_compare_op1 (module
     globals set by the expanders), emitting the compare insn(s), and
     returns a fresh (code (reg:CC) (const_int 0)) rtx describing the
     result.  CODE may be rewritten (e.g. to EQ) when the comparison is
     synthesized from several instructions.  */
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: CCFP for FP compares, CCUNS for unsigned integer
     compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else if ((code == EQ || code == NE)
	   && GET_CODE (rs6000_compare_op0) == SUBREG
	   && GET_CODE (rs6000_compare_op1) == SUBREG
	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
    /* These are unsigned values, perhaps there will be a later
       ordering compare that can be shared with this one.
       Unfortunately we cannot detect the signedness of the operands
       for non-subregs.  */
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* E500 FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or_result, compare_result2;
      enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);

      if (op_mode == VOIDmode)
	op_mode = GET_MODE (rs6000_compare_op1);

      /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
	 This explains the following mess.  */

      switch (code)
	{
	case EQ: case UNEQ: case NE: case LTGT:
	  /* The tst* variants are usable only under
	     -funsafe-math-optimizations; cmp* are the checking forms.  */
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;
        default:
          gcc_unreachable ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Emit the strict compare first, then the EQ compare, and OR
	     the two CR results together.  */
	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: gcc_unreachable ();
	    }

	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  emit_insn (cmp);

	  /* OR them together.  */
	  or_result = gen_reg_rtx (CCFPmode);
	  cmp = gen_e500_cr_ior_compare (or_result, compare_result,
					   compare_result2);
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* The single-instruction cases test the GT bit, so the final
	     branch condition collapses to EQ or NE on that bit.  */
	  if (code == NE || code == LTGT)
	    code = NE;
	  else
	    code = EQ;
	}

      emit_insn (cmp);
    }
  else
    {
      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
	 CLOBBERs to match cmptf_internal2 pattern.  */
      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
	  && GET_MODE (rs6000_compare_op0) == TFmode
	  && !TARGET_IEEEQUAD
	  && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
	emit_insn (gen_rtx_PARALLEL (VOIDmode,
	  gen_rtvec (9,
		     gen_rtx_SET (VOIDmode,
				  compare_result,
				  gen_rtx_COMPARE (comp_mode,
						   rs6000_compare_op0,
						   rs6000_compare_op1)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
      else if (GET_CODE (rs6000_compare_op1) == UNSPEC
	       && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
	{
	  /* Stack-protector canary test; uses a dedicated CCEQ pattern.  */
	  rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
	  comp_mode = CCEQmode;
	  compare_result = gen_reg_rtx (CCEQmode);
	  if (TARGET_64BIT)
	    emit_insn (gen_stack_protect_testdi (compare_result,
						 rs6000_compare_op0, op1));
	  else
	    emit_insn (gen_stack_protect_testsi (compare_result,
						 rs6000_compare_op0, op1));
	}
      else
	/* The common case: a single compare insn.  */
	emit_insn (gen_rtx_SET (VOIDmode, compare_result,
				gen_rtx_COMPARE (comp_mode,
						 rs6000_compare_op0,
						 rs6000_compare_op1)));
    }

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && !flag_finite_math_only
      && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose the condition into two CR-bit tests ORed together.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  gcc_unreachable ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
11413
11414
11415/* Emit the RTL for an sCOND pattern.  */
11416
void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  /* Materialize the result of comparison CODE (operands taken from the
     rs6000_compare_op0/op1 globals) as 0 or 1 in register RESULT.  */
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      /* E500 FP compares leave their answer in the GT bit of the CR
	 field; copy (possibly inverting) that single bit into RESULT.  */
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      gcc_assert (cond_code == NE || cond_code == EQ);

      if (cond_code == NE)
	emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  /* These condition codes cannot be read out of the CR directly with a
     single bit test; build an inverted CCEQ compare so the final test
     is a plain EQ.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
				     SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op))
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  /* On 64-bit targets a DImode (or FP) compare produces a DImode scc
     value that may need narrowing into RESULT.  */
  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
11477
11478/* Emit a branch of kind CODE to location LOC.  */
11479
11480void
11481rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11482{
11483  rtx condition_rtx, loc_ref;
11484
11485  condition_rtx = rs6000_generate_compare (code);
11486  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11487  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11488			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11489						     loc_ref, pc_rtx)));
11490}
11491
11492/* Return the string to output a conditional branch to LABEL, which is
11493   the operand number of the label, or -1 if the branch is really a
11494   conditional return.
11495
11496   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
11497   condition code register and its mode specifies what kind of
11498   comparison we made.
11499
11500   REVERSED is nonzero if we should reverse the sense of the comparison.
11501
11502   INSN is the insn.  */
11503
char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  /* Returns a pointer to a static buffer; the caller must consume the
     string before the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the conditional branch cannot reach LABEL directly;
     emit an inverted short branch around an unconditional one.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      switch (code)
	{
	case EQ:
	  /* Opposite of GT.  */
	  code = GT;
	  break;

	case NE:
	  code = UNLE;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Translate the rtx condition code into a branch mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      gcc_unreachable ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (rs6000_always_hint
	  || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
	      && br_prob_note_reliable_p (note)))
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* LABEL == NULL means a conditional return; "{...|...}" selects
     between old POWER and new PowerPC mnemonics.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
11628
11629/* Return the string to flip the GT bit on a CR.  */
11630char *
11631output_e500_flip_gt_bit (rtx dst, rtx src)
11632{
11633  static char string[64];
11634  int a, b;
11635
11636  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11637	      && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11638
11639  /* GT bit.  */
11640  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11641  b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11642
11643  sprintf (string, "crnot %d,%d", a, b);
11644  return string;
11645}
11646
11647/* Return insn index for the vector compare instruction for given CODE,
11648   and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11649   not available.  */
11650
11651static int
11652get_vec_cmp_insn (enum rtx_code code,
11653		  enum machine_mode dest_mode,
11654		  enum machine_mode op_mode)
11655{
11656  if (!TARGET_ALTIVEC)
11657    return INSN_NOT_AVAILABLE;
11658
11659  switch (code)
11660    {
11661    case EQ:
11662      if (dest_mode == V16QImode && op_mode == V16QImode)
11663	return UNSPEC_VCMPEQUB;
11664      if (dest_mode == V8HImode && op_mode == V8HImode)
11665	return UNSPEC_VCMPEQUH;
11666      if (dest_mode == V4SImode && op_mode == V4SImode)
11667	return UNSPEC_VCMPEQUW;
11668      if (dest_mode == V4SImode && op_mode == V4SFmode)
11669	return UNSPEC_VCMPEQFP;
11670      break;
11671    case GE:
11672      if (dest_mode == V4SImode && op_mode == V4SFmode)
11673	return UNSPEC_VCMPGEFP;
11674    case GT:
11675      if (dest_mode == V16QImode && op_mode == V16QImode)
11676	return UNSPEC_VCMPGTSB;
11677      if (dest_mode == V8HImode && op_mode == V8HImode)
11678	return UNSPEC_VCMPGTSH;
11679      if (dest_mode == V4SImode && op_mode == V4SImode)
11680	return UNSPEC_VCMPGTSW;
11681      if (dest_mode == V4SImode && op_mode == V4SFmode)
11682	return UNSPEC_VCMPGTFP;
11683      break;
11684    case GTU:
11685      if (dest_mode == V16QImode && op_mode == V16QImode)
11686	return UNSPEC_VCMPGTUB;
11687      if (dest_mode == V8HImode && op_mode == V8HImode)
11688	return UNSPEC_VCMPGTUH;
11689      if (dest_mode == V4SImode && op_mode == V4SImode)
11690	return UNSPEC_VCMPGTUW;
11691      break;
11692    default:
11693      break;
11694    }
11695  return INSN_NOT_AVAILABLE;
11696}
11697
11698/* Emit vector compare for operands OP0 and OP1 using code RCODE.
11699   DMODE is expected destination mode. This is a recursive function.  */
11700
static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
			    rtx op0, rtx op1,
			    enum machine_mode dmode)
{
  /* Returns a register holding the element-wise comparison mask.
     Conditions with no direct AltiVec instruction are synthesized
     recursively: LT/LTU by swapping operands of GT/GTU, NE as ~EQ,
     and GE/GEU/LE/LEU as the strict compare ORed with EQ.  */
  int vec_cmp_insn;
  rtx mask;
  enum machine_mode dest_mode;
  enum machine_mode op_mode = GET_MODE (op1);

  gcc_assert (TARGET_ALTIVEC);
  gcc_assert (GET_MODE (op0) == GET_MODE (op1));

  /* Floating-point vector compare instructions use V4SImode as the
     destination.  Move the result to the requested mode afterwards.  */
  if (dmode == V4SFmode)
    dest_mode = V4SImode;
  else
    dest_mode = dmode;

  mask = gen_reg_rtx (dest_mode);
  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);

  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
    {
      bool swap_operands = false;
      bool try_again = false;
      switch (rcode)
	{
	case LT:
	  /* a < b  ==  b > a.  */
	  rcode = GT;
	  swap_operands = true;
	  try_again = true;
	  break;
	case LTU:
	  rcode = GTU;
	  swap_operands = true;
	  try_again = true;
	  break;
	case NE:
	  /* Treat A != B as ~(A==B).  */
	  {
	    enum insn_code nor_code;
	    rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
						     dest_mode);

	    nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
	    gcc_assert (nor_code != CODE_FOR_nothing);
	    emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));

	    if (dmode != dest_mode)
	      {
		/* NOTE(review): temp is allocated in dest_mode here (and
		   in the two identical blocks below), which makes the
		   convert_move a same-mode move; gen_reg_rtx (dmode) may
		   have been intended -- confirm before changing.  */
		rtx temp = gen_reg_rtx (dest_mode);
		convert_move (temp, mask, 0);
		return temp;
	      }
	    return mask;
	  }
	  break;
	case GE:
	case GEU:
	case LE:
	case LEU:
	  /* Try GT/GTU/LT/LTU OR EQ */
	  {
	    rtx c_rtx, eq_rtx;
	    enum insn_code ior_code;
	    enum rtx_code new_code;

	    switch (rcode)
	      {
	      case  GE:
		new_code = GT;
		break;

	      case GEU:
		new_code = GTU;
		break;

	      case LE:
		new_code = LT;
		break;

	      case LEU:
		new_code = LTU;
		break;

	      default:
		gcc_unreachable ();
	      }

	    c_rtx = rs6000_emit_vector_compare (new_code,
						op0, op1, dest_mode);
	    eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
						 dest_mode);

	    ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
	    gcc_assert (ior_code != CODE_FOR_nothing);
	    emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
	    if (dmode != dest_mode)
	      {
		rtx temp = gen_reg_rtx (dest_mode);
		convert_move (temp, mask, 0);
		return temp;
	      }
	    return mask;
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      if (try_again)
	{
	  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
	  /* You only get two chances.  */
	  gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
	}

      if (swap_operands)
	{
	  rtx tmp;
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}
    }

  /* Emit the compare as an UNSPEC so it matches the altivec predicate
     patterns in the machine description.  */
  emit_insn (gen_rtx_SET (VOIDmode, mask,
			  gen_rtx_UNSPEC (dest_mode,
					  gen_rtvec (2, op0, op1),
					  vec_cmp_insn)));
  if (dmode != dest_mode)
    {
      rtx temp = gen_reg_rtx (dest_mode);
      convert_move (temp, mask, 0);
      return temp;
    }
  return mask;
}
11841
/* Return the vector select instruction for MODE, or INSN_NOT_AVAILABLE
   if no valid insn exists for the given mode.  */
11844
11845static int
11846get_vsel_insn (enum machine_mode mode)
11847{
11848  switch (mode)
11849    {
11850    case V4SImode:
11851      return UNSPEC_VSEL4SI;
11852      break;
11853    case V4SFmode:
11854      return UNSPEC_VSEL4SF;
11855      break;
11856    case V8HImode:
11857      return UNSPEC_VSEL8HI;
11858      break;
11859    case V16QImode:
11860      return UNSPEC_VSEL16QI;
11861      break;
11862    default:
11863      return INSN_NOT_AVAILABLE;
11864      break;
11865    }
11866  return INSN_NOT_AVAILABLE;
11867}
11868
11869/* Emit vector select insn where DEST is destination using
11870   operands OP1, OP2 and MASK.  */
11871
11872static void
11873rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11874{
11875  rtx t, temp;
11876  enum machine_mode dest_mode = GET_MODE (dest);
11877  int vsel_insn_index  = get_vsel_insn (GET_MODE (dest));
11878
11879  temp = gen_reg_rtx (dest_mode);
11880
11881  /* For each vector element, select op1 when mask is 1 otherwise
11882     select op2.  */
11883  t = gen_rtx_SET (VOIDmode, temp,
11884		   gen_rtx_UNSPEC (dest_mode,
11885				   gen_rtvec (3, op2, op1, mask),
11886				   vsel_insn_index));
11887  emit_insn (t);
11888  emit_move_insn (dest, temp);
11889  return;
11890}
11891
11892/* Emit vector conditional expression.
11893   DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11894   CC_OP0 and CC_OP1 are the two operands for the relation operation COND.  */
11895
11896int
11897rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11898			      rtx cond, rtx cc_op0, rtx cc_op1)
11899{
11900  enum machine_mode dest_mode = GET_MODE (dest);
11901  enum rtx_code rcode = GET_CODE (cond);
11902  rtx mask;
11903
11904  if (!TARGET_ALTIVEC)
11905    return 0;
11906
11907  /* Get the vector mask for the given relational operations.  */
11908  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11909
11910  rs6000_emit_vector_select (dest, op1, op2, mask);
11911
11912  return 1;
11913}
11914
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.  */

int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  /* The comparison operands come from the most recent comparison
     setup stored in the globals, not from OP itself.  */
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;
  bool is_against_zero;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      /* Integer conditional move is only available via isel.  */
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
	   && SCALAR_FLOAT_MODE_P (compare_mode))
    return 0;

  is_against_zero = op1 == CONST0_RTX (compare_mode);

  /* A floating-point subtract might overflow, underflow, or produce
     an inexact result, thus changing the floating-point flags, so it
     can't be generated if we care about that.  It's safe if one side
     of the construct is zero, since then no subtract will be
     generated.  */
  if (SCALAR_FLOAT_MODE_P (compare_mode)
      && flag_trapping_math && ! is_against_zero)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;

  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  if (! is_against_zero)
    {
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_MINUS (compare_mode, op0, op1)));
      op0 = temp;
      op1 = CONST0_RTX (compare_mode);
    }

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	/* Without NaNs, !(a > 0) is (a <= 0): flip the code and
	   swap the arms of the conditional move.  */
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a <= 0  <->  -a >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* |a| >= 0 is false only when A is a NaN, i.e. unordered.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a == 0  <->  -|a| >= 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      gcc_unreachable ();
    }

  /* Emit the final conditional move: DEST = (op0 >= op1 ? TRUE_COND
     : FALSE_COND); op1 is zero here on every path above.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
12100
12101/* Same as above, but for ints (isel).  */
12102
12103static int
12104rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
12105{
12106  rtx condition_rtx, cr;
12107
12108  /* All isel implementations thus far are 32-bits.  */
12109  if (GET_MODE (rs6000_compare_op0) != SImode)
12110    return 0;
12111
12112  /* We still have to do the compare, because isel doesn't do a
12113     compare, it just looks at the CRx bits set by a previous compare
12114     instruction.  */
12115  condition_rtx = rs6000_generate_compare (GET_CODE (op));
12116  cr = XEXP (condition_rtx, 0);
12117
12118  if (GET_MODE (cr) == CCmode)
12119    emit_insn (gen_isel_signed (dest, condition_rtx,
12120				true_cond, false_cond, cr));
12121  else
12122    emit_insn (gen_isel_unsigned (dest, condition_rtx,
12123				  true_cond, false_cond, cr));
12124
12125  return 1;
12126}
12127
12128const char *
12129output_isel (rtx *operands)
12130{
12131  enum rtx_code code;
12132
12133  code = GET_CODE (operands[1]);
12134  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12135    {
12136      PUT_CODE (operands[1], reverse_condition (code));
12137      return "isel %0,%3,%2,%j1";
12138    }
12139  else
12140    return "isel %0,%2,%3,%j1";
12141}
12142
12143void
12144rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
12145{
12146  enum machine_mode mode = GET_MODE (op0);
12147  enum rtx_code c;
12148  rtx target;
12149
12150  if (code == SMAX || code == SMIN)
12151    c = GE;
12152  else
12153    c = GEU;
12154
12155  if (code == SMAX || code == UMAX)
12156    target = emit_conditional_move (dest, c, op0, op1, mode,
12157				    op0, op1, mode, 0);
12158  else
12159    target = emit_conditional_move (dest, c, op0, op1, mode,
12160				    op1, op0, mode, 0);
12161  gcc_assert (target);
12162  if (target != dest)
12163    emit_move_insn (dest, target);
12164}
12165
/* Emit instructions to perform a load-reserved/store-conditional operation.
   The operation performed is an atomic
   (set M (CODE:MODE M OP))
   If not NULL, BEFORE is atomically set to M before the operation, and
   AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
   If SYNC_P then a memory barrier is emitted before the operation.
   Either OP or M may be wrapped in a NOT operation.  */

void
rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
		  rtx m, rtx op, rtx before_param, rtx after_param,
		  bool sync_p)
{
  enum machine_mode used_mode;
  rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
  rtx used_m;
  rtvec vec;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
  rtx shift = NULL_RTX;

  if (sync_p)
    emit_insn (gen_memory_barrier ());

  /* USED_M is the memory operand with any wrapping NOT stripped.  */
  if (GET_CODE (m) == NOT)
    used_m = XEXP (m, 0);
  else
    used_m = m;

  /* If this is smaller than SImode, we'll have to use SImode with
     adjustments.  */
  if (mode == QImode || mode == HImode)
    {
      rtx newop, oldop;

      if (MEM_ALIGN (used_m) >= 32)
	{
	  /* Word-aligned access: the subword position within the
	     word is known at compile time.  */
	  int ishift = 0;
	  if (BYTES_BIG_ENDIAN)
	    ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);

	  shift = GEN_INT (ishift);
	}
      else
	{
	  /* Unaligned access: compute the bit shift of the subword
	     within its enclosing aligned word at run time.  */
	  rtx addrSI, aligned_addr;
	  int shift_mask = mode == QImode ? 0x18 : 0x10;

	  addrSI = force_reg (SImode, gen_lowpart_common (SImode,
							  XEXP (used_m, 0)));
	  shift = gen_reg_rtx (SImode);

	  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
				 GEN_INT (shift_mask)));
	  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

	  /* Rewrite the memory operand to address the aligned word.  */
	  aligned_addr = expand_binop (Pmode, and_optab,
				       XEXP (used_m, 0),
				       GEN_INT (-4), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	  used_m = change_address (used_m, SImode, aligned_addr);
	  set_mem_align (used_m, 32);
	  /* It's safe to keep the old alias set of USED_M, because
	     the operation is atomic and only affects the original
	     USED_M.  */
	  if (GET_CODE (m) == NOT)
	    m = gen_rtx_NOT (SImode, used_m);
	  else
	    m = used_m;
	}

      /* Widen OP to SImode, preserving any wrapping NOT.  */
      if (GET_CODE (op) == NOT)
	{
	  oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
	  oldop = gen_rtx_NOT (SImode, oldop);
	}
      else
	oldop = lowpart_subreg (SImode, op, mode);

      /* Position OP's bits over the subword within the SImode word.
	 How the surrounding bits must look depends on CODE.  */
      switch (code)
	{
	case IOR:
	case XOR:
	  /* Zeros outside the subword leave the other bytes alone.  */
	  newop = expand_binop (SImode, and_optab,
				oldop, GEN_INT (imask), NULL_RTX,
				1, OPTAB_LIB_WIDEN);
	  emit_insn (gen_ashlsi3 (newop, newop, shift));
	  break;

	case AND:
	  /* Ones outside the subword leave the other bytes alone.  */
	  newop = expand_binop (SImode, ior_optab,
				oldop, GEN_INT (~imask), NULL_RTX,
				1, OPTAB_LIB_WIDEN);
	  emit_insn (gen_rotlsi3 (newop, newop, shift));
	  break;

	case PLUS:
	case MINUS:
	  {
	    /* Carries can cross the subword boundary, so compute the
	       full-word result and merge only the subword bits back.  */
	    rtx mask;

	    newop = expand_binop (SImode, and_optab,
				  oldop, GEN_INT (imask), NULL_RTX,
				  1, OPTAB_LIB_WIDEN);
	    emit_insn (gen_ashlsi3 (newop, newop, shift));

	    mask = gen_reg_rtx (SImode);
	    emit_move_insn (mask, GEN_INT (imask));
	    emit_insn (gen_ashlsi3 (mask, mask, shift));

	    if (code == PLUS)
	      newop = gen_rtx_PLUS (SImode, m, newop);
	    else
	      newop = gen_rtx_MINUS (SImode, m, newop);
	    newop = gen_rtx_AND (SImode, newop, mask);
	    newop = gen_rtx_IOR (SImode, newop,
				 gen_rtx_AND (SImode,
					      gen_rtx_NOT (SImode, mask),
					      m));
	    break;
	  }

	default:
	  gcc_unreachable ();
	}

      if (GET_CODE (m) == NOT)
	{
	  rtx mask, xorm;

	  mask = gen_reg_rtx (SImode);
	  emit_move_insn (mask, GEN_INT (imask));
	  emit_insn (gen_ashlsi3 (mask, mask, shift));

	  xorm = gen_rtx_XOR (SImode, used_m, mask);
	  /* Depending on the value of 'op', the XOR or the operation might
	     be able to be simplified away.  */
	  newop = simplify_gen_binary (code, SImode, xorm, newop);
	}
      op = newop;
      used_mode = SImode;
      /* Raw SImode copies; the caller's BEFORE/AFTER are filled in
	 after shifting/masking below.  */
      before = gen_reg_rtx (used_mode);
      after = gen_reg_rtx (used_mode);
    }
  else
    {
      used_mode = mode;
      before = before_param;
      after = after_param;

      if (before == NULL_RTX)
	before = gen_reg_rtx (used_mode);
      if (after == NULL_RTX)
	after = gen_reg_rtx (used_mode);
    }

  if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
      && used_mode != mode)
    the_op = op;  /* Computed above.  */
  else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
    the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
  else
    the_op = gen_rtx_fmt_ee (code, used_mode, m, op);

  set_after = gen_rtx_SET (VOIDmode, after, the_op);
  set_before = gen_rtx_SET (VOIDmode, before, used_m);
  set_atomic = gen_rtx_SET (VOIDmode, used_m,
			    gen_rtx_UNSPEC (used_mode,
					    gen_rtvec (1, the_op),
					    UNSPEC_SYNC_OP));
  cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));

  if ((code == PLUS || code == MINUS) && used_mode != mode)
    vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
  else
    vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
  emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));

  /* Shift and mask the return values properly.  */
  if (used_mode != mode && before_param)
    {
      emit_insn (gen_lshrsi3 (before, before, shift));
      convert_move (before_param, before, 1);
    }

  if (used_mode != mode && after_param)
    {
      emit_insn (gen_lshrsi3 (after, after, shift));
      convert_move (after_param, after, 1);
    }

  /* The previous sequence will end with a branch that's dependent on
     the conditional store, so placing an isync will ensure that no
     other instructions (especially, no load or store instructions)
     can start before the atomic operation completes.  */
  if (sync_p)
    emit_insn (gen_isync ());
}
12364
12365/* A subroutine of the atomic operation splitters.  Jump to LABEL if
12366   COND is true.  Mark the jump as unlikely to be taken.  */
12367
12368static void
12369emit_unlikely_jump (rtx cond, rtx label)
12370{
12371  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12372  rtx x;
12373
12374  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12375  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12376  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12377}
12378
12379/* A subroutine of the atomic operation splitters.  Emit a load-locked
12380   instruction in MODE.  */
12381
12382static void
12383emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12384{
12385  rtx (*fn) (rtx, rtx) = NULL;
12386  if (mode == SImode)
12387    fn = gen_load_locked_si;
12388  else if (mode == DImode)
12389    fn = gen_load_locked_di;
12390  emit_insn (fn (reg, mem));
12391}
12392
12393/* A subroutine of the atomic operation splitters.  Emit a store-conditional
12394   instruction in MODE.  */
12395
12396static void
12397emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12398{
12399  rtx (*fn) (rtx, rtx, rtx) = NULL;
12400  if (mode == SImode)
12401    fn = gen_store_conditional_si;
12402  else if (mode == DImode)
12403    fn = gen_store_conditional_di;
12404
12405  /* Emit sync before stwcx. to address PPC405 Erratum.  */
12406  if (PPC405_ERRATUM77)
12407    emit_insn (gen_memory_barrier ());
12408
12409  emit_insn (fn (res, mem, val));
12410}
12411
/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
   to perform.  MEM is the memory on which to operate.  VAL is the second
   operand of the binary operator.  BEFORE and AFTER are optional locations to
   return the value of MEM either before or after the operation.  SCRATCH is
   a scratch register.  */

void
rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
                       rtx before, rtx after, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  /* Barrier before the load-reserved loop.  */
  emit_insn (gen_memory_barrier ());

  /* LABEL is the retry point; reuse the variable for its LABEL_REF.  */
  label = gen_label_rtx ();
  emit_label (label);
  label = gen_rtx_LABEL_REF (VOIDmode, label);

  if (before == NULL_RTX)
    before = scratch;
  emit_load_locked (mode, before, mem);

  /* Build the RTL for (MEM CODE VAL).  NOT encodes an AND-with-
     complement; AND proper goes through an UNSPEC (see the .md
     patterns) to avoid an unwanted clobber.  */
  if (code == NOT)
    x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
  else if (code == AND)
    x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
  else
    x = gen_rtx_fmt_ee (code, mode, before, val);

  if (after != NULL_RTX)
    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));

  emit_store_conditional (mode, cond, mem, scratch);

  /* Retry if the reservation was lost.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
12453
/* Expand an atomic compare and swap operation.  MEM is the memory on which
   to operate.  OLDVAL is the old value to be compared.  NEWVAL is the new
   value to be stored.  SCRATCH is a scratch GPR.  RETVAL receives the
   value loaded from MEM.  */

void
rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
			       rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  /* LABEL1 is the retry point; LABEL2 is the exit taken when the
     comparison fails.  */
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (mode, retval, mem);

  x = gen_rtx_COMPARE (CCmode, retval, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  /* If the loaded value differs from OLDVAL, skip the store.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  emit_move_insn (scratch, newval);
  emit_store_conditional (mode, cond, mem, scratch);

  /* Retry if the reservation was lost.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
12488
/* Expand an atomic test and set operation.  MEM is the memory on which
   to operate.  VAL is the value set.  SCRATCH is a scratch GPR.
   RETVAL receives the previous contents of MEM.  */

void
rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  /* LABEL is the retry point of the load-reserved loop.  */
  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label, 0));

  emit_load_locked (mode, retval, mem);
  emit_move_insn (scratch, val);
  emit_store_conditional (mode, cond, mem, scratch);

  /* Retry if the reservation was lost.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
12512
/* Expand an atomic compare and swap on a QImode or HImode subword.
   DST receives the old subword value; MEM addresses the subword;
   OLDVAL and NEWVAL are the compare and replacement values.  The
   subword is accessed through its enclosing aligned SImode word,
   with the shift and mask computed here; the actual loop comes from
   the sync_compare_and_swapqhi_internal pattern (presumably split by
   rs6000_split_compare_and_swapqhi below — confirm in the .md file).  */

void
rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx addrSI, align, wdst, shift, mask;
  HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);

  /* Shift amount for subword relative to aligned word.  */
  addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
  shift = gen_reg_rtx (SImode);
  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
			 GEN_INT (shift_mask)));
  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

  /* Shift and mask old value into position within word.  */
  oldval = convert_modes (SImode, mode, oldval, 1);
  oldval = expand_binop (SImode, and_optab,
			 oldval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (oldval, oldval, shift));

  /* Shift and mask new value into position within word.  */
  newval = convert_modes (SImode, mode, newval, 1);
  newval = expand_binop (SImode, and_optab,
			 newval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (newval, newval, shift));

  /* Mask for insertion.  */
  mask = gen_reg_rtx (SImode);
  emit_move_insn (mask, GEN_INT (imask));
  emit_insn (gen_ashlsi3 (mask, mask, shift));

  /* Address of aligned word containing subword.  */
  align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
			NULL_RTX, 1, OPTAB_LIB_WIDEN);
  mem = change_address (mem, SImode, align);
  set_mem_align (mem, 32);
  MEM_VOLATILE_P (mem) = 1;

  /* Do the compare-and-swap on the whole word, then extract the
     subword result.  */
  wdst = gen_reg_rtx (SImode);
  emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
						    oldval, newval, mem));

  emit_move_insn (dst, gen_lowpart (mode, wdst));
}
12560
/* Split the subword compare-and-swap into the load-reserved/
   store-conditional loop.  DEST receives the masked old subword;
   MASK selects the subword bits within the SImode word at MEM;
   OLDVAL and NEWVAL are already shifted into position; SCRATCH is a
   scratch GPR.  */

void
rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
				  rtx oldval, rtx newval, rtx mem,
				  rtx scratch)
{
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());
  /* LABEL1 is the retry point; LABEL2 is the exit taken when the
     comparison fails.  */
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (SImode, scratch, mem);

  /* Mask subword within loaded value for comparison with oldval.
     Use UNSPEC_AND to avoid clobber.  */
  emit_insn (gen_rtx_SET (SImode, dest,
			  gen_rtx_UNSPEC (SImode,
					  gen_rtvec (2, scratch, mask),
					  UNSPEC_AND)));

  x = gen_rtx_COMPARE (CCmode, dest, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  /* Clear subword within loaded value for insertion of new value.  */
  emit_insn (gen_rtx_SET (SImode, scratch,
			  gen_rtx_AND (SImode,
				       gen_rtx_NOT (SImode, mask), scratch)));
  emit_insn (gen_iorsi3 (scratch, scratch, newval));
  emit_store_conditional (SImode, cond, mem, scratch);

  /* Retry if the reservation was lost.  */
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
12601
12602
/* Emit instructions to move SRC to DST.  Called by splitters for
   multi-register moves.  It will emit at most one instruction for
   each register that is accessed; that is, it won't emit li/lis pairs
   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
   register.  */

void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = hard_regno_nregs[reg][mode];
  /* Choose the per-step mode from the register class involved.  */
  if (FP_REGNO_P (reg))
    reg_mode = DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else if (TARGET_E500_DOUBLE && mode == TFmode)
    reg_mode = DFmode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
	 overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
	emit_insn (gen_rtx_SET (VOIDmode,
				simplify_gen_subreg (reg_mode, dst, mode,
						     i * reg_mode_size),
				simplify_gen_subreg (reg_mode, src, mode,
						     i * reg_mode_size)));
    }
  else
    {
      int i;
      /* J is the subword index to move first; -1 means "start at 0"
	 after the pre-increment in the loop below.  */
      int j = -1;
      bool used_update = false;

      if (MEM_P (src) && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
	    {
	      /* Materialize the pre-modify as an explicit add so the
		 per-word loads can use plain offsets.  */
	      rtx delta_rtx;
	      breg = XEXP (XEXP (src, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
	      emit_insn (TARGET_32BIT
			 ? gen_addsi3 (breg, breg, delta_rtx)
			 : gen_adddi3 (breg, breg, delta_rtx));
	      src = replace_equiv_address (src, breg);
	    }
	  else if (! rs6000_offsettable_memref_p (src))
	    {
	      /* Load the address into the first destination register
		 (DST must be the REG side here) and address off it.  */
	      rtx basereg;
	      basereg = gen_rtx_REG (Pmode, reg);
	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
	      src = replace_equiv_address (src, basereg);
	    }

	  breg = XEXP (src, 0);
	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
	    breg = XEXP (breg, 0);

	  /* If the base register we are using to address memory is
	     also a destination reg, then change that register last.  */
	  if (REG_P (breg)
	      && REGNO (breg) >= REGNO (dst)
	      && REGNO (breg) < REGNO (dst) + nregs)
	    j = REGNO (breg) - REGNO (dst);
	}

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (dst, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));

	      /* We have to update the breg before doing the store.
		 Use store with update, if available.  */

	      if (TARGET_UPDATE)
		{
		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
		  emit_insn (TARGET_32BIT
			     ? (TARGET_POWERPC64
				? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
				: gen_movsi_update (breg, breg, delta_rtx, nsrc))
			     : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
		  used_update = true;
		}
	      else
		emit_insn (TARGET_32BIT
			   ? gen_addsi3 (breg, breg, delta_rtx)
			   : gen_adddi3 (breg, breg, delta_rtx));
	      dst = replace_equiv_address (dst, breg);
	    }
	  else
	    gcc_assert (rs6000_offsettable_memref_p (dst));
	}

      /* Move the subwords one at a time, starting just after J so a
	 destination register that doubles as the address base is
	 written last.  */
      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  ++j;
	  if (j == nregs)
	    j = 0;

	  /* If compiler already emitted move of first word by
	     store with update, no need to do anything.  */
	  if (j == 0 && used_update)
	    continue;

	  emit_insn (gen_rtx_SET (VOIDmode,
				  simplify_gen_subreg (reg_mode, dst, mode,
						       j * reg_mode_size),
				  simplify_gen_subreg (reg_mode, src, mode,
						       j * reg_mode_size)));
	}
    }
}
12748
12749
12750/* This page contains routines that are used to determine what the
12751   function prologue and epilogue code will do and write them out.  */
12752
12753/* Return the first fixed-point register that is required to be
12754   saved. 32 if none.  */
12755
12756int
12757first_reg_to_save (void)
12758{
12759  int first_reg;
12760
12761  /* Find lowest numbered live register.  */
12762  for (first_reg = 13; first_reg <= 31; first_reg++)
12763    if (regs_ever_live[first_reg]
12764	&& (! call_used_regs[first_reg]
12765	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
12766		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12767		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12768		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12769      break;
12770
12771#if TARGET_MACHO
12772  if (flag_pic
12773      && current_function_uses_pic_offset_table
12774      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12775    return RS6000_PIC_OFFSET_TABLE_REGNUM;
12776#endif
12777
12778  return first_reg;
12779}
12780
12781/* Similar, for FP regs.  */
12782
12783int
12784first_fp_reg_to_save (void)
12785{
12786  int first_reg;
12787
12788  /* Find lowest numbered live register.  */
12789  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12790    if (regs_ever_live[first_reg])
12791      break;
12792
12793  return first_reg;
12794}
12795
12796/* Similar, for AltiVec regs.  */
12797
12798static int
12799first_altivec_reg_to_save (void)
12800{
12801  int i;
12802
12803  /* Stack frame remains as is unless we are in AltiVec ABI.  */
12804  if (! TARGET_ALTIVEC_ABI)
12805    return LAST_ALTIVEC_REGNO + 1;
12806
12807  /* On Darwin, the unwind routines are compiled without
12808     TARGET_ALTIVEC, and use save_world to save/restore the
12809     altivec registers when necessary.  */
12810  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
12811      && ! TARGET_ALTIVEC)
12812    return FIRST_ALTIVEC_REGNO + 20;
12813
12814  /* Find lowest numbered live register.  */
12815  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12816    if (regs_ever_live[i])
12817      break;
12818
12819  return i;
12820}
12821
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     call-saved altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    mask |= 0xFFF;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned; this assumes args_info.vregno >=
     ALTIVEC_ARG_MIN_REG, otherwise the decrement would wrap around —
     confirm against how vregno is initialized.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
12863}
12864
12865/* For a very restricted set of circumstances, we can cut down the
12866   size of prologues/epilogues by calling our own save/restore-the-world
12867   routines.  */
12868
12869static void
12870compute_save_world_info (rs6000_stack_t *info_ptr)
12871{
12872  info_ptr->world_save_p = 1;
12873  info_ptr->world_save_p
12874    = (WORLD_SAVE_P (info_ptr)
12875       && DEFAULT_ABI == ABI_DARWIN
12876       && ! (current_function_calls_setjmp && flag_exceptions)
12877       && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12878       && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12879       && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12880       && info_ptr->cr_save_p);
12881
12882  /* This will not work in conjunction with sibcalls.  Make sure there
12883     are none.  (This check is expensive, but seldom executed.) */
12884  if (WORLD_SAVE_P (info_ptr))
12885    {
12886      rtx insn;
12887      for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12888	if ( GET_CODE (insn) == CALL_INSN
12889	     && SIBLING_CALL_P (insn))
12890	  {
12891	    info_ptr->world_save_p = 0;
12892	    break;
12893	  }
12894    }
12895
12896  if (WORLD_SAVE_P (info_ptr))
12897    {
12898      /* Even if we're not touching VRsave, make sure there's room on the
12899	 stack for it, if it looks like we're calling SAVE_WORLD, which
12900	 will attempt to save it. */
12901      info_ptr->vrsave_size  = 4;
12902
12903      /* "Save" the VRsave register too if we're saving the world.  */
12904      if (info_ptr->vrsave_mask == 0)
12905	info_ptr->vrsave_mask = compute_vrsave_mask ();
12906
12907      /* Because the Darwin register save/restore routines only handle
12908	 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12909	 check.  */
12910      gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12911		  && (info_ptr->first_altivec_reg_save
12912		      >= FIRST_SAVED_ALTIVEC_REGNO));
12913    }
12914  return;
12915}
12916
12917
12918static void
12919is_altivec_return_reg (rtx reg, void *xyes)
12920{
12921  bool *yes = (bool *) xyes;
12922  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12923    *yes = true;
12924}
12925
12926
12927/* Calculate the stack information for the current function.  This is
12928   complicated by having two separate calling sequences, the AIX calling
12929   sequence and the V.4 calling sequence.
12930
12931   AIX (and Darwin/Mac OS X) stack frames look like:
12932							  32-bit  64-bit
12933	SP---->	+---------------------------------------+
12934		| back chain to caller			| 0	  0
12935		+---------------------------------------+
12936		| saved CR				| 4       8 (8-11)
12937		+---------------------------------------+
12938		| saved LR				| 8       16
12939		+---------------------------------------+
12940		| reserved for compilers		| 12      24
12941		+---------------------------------------+
12942		| reserved for binders			| 16      32
12943		+---------------------------------------+
12944		| saved TOC pointer			| 20      40
12945		+---------------------------------------+
12946		| Parameter save area (P)		| 24      48
12947		+---------------------------------------+
12948		| Alloca space (A)			| 24+P    etc.
12949		+---------------------------------------+
12950		| Local variable space (L)		| 24+P+A
12951		+---------------------------------------+
12952		| Float/int conversion temporary (X)	| 24+P+A+L
12953		+---------------------------------------+
12954		| Save area for AltiVec registers (W)	| 24+P+A+L+X
12955		+---------------------------------------+
12956		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
12957		+---------------------------------------+
12958		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
12959		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
12961		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
12963		+---------------------------------------+
12964	old SP->| back chain to caller's caller		|
12965		+---------------------------------------+
12966
12967   The required alignment for AIX configurations is two words (i.e., 8
12968   or 16 bytes).
12969
12970
12971   V.4 stack frames look like:
12972
12973	SP---->	+---------------------------------------+
12974		| back chain to caller			| 0
12975		+---------------------------------------+
12976		| caller's saved LR			| 4
12977		+---------------------------------------+
12978		| Parameter save area (P)		| 8
12979		+---------------------------------------+
12980		| Alloca space (A)			| 8+P
12981		+---------------------------------------+
12982		| Varargs save area (V)			| 8+P+A
12983		+---------------------------------------+
12984		| Local variable space (L)		| 8+P+A+V
12985		+---------------------------------------+
12986		| Float/int conversion temporary (X)	| 8+P+A+V+L
12987		+---------------------------------------+
12988		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
12989		+---------------------------------------+
12990		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
12991		+---------------------------------------+
12992		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
12993		+---------------------------------------+
12994		| SPE: area for 64-bit GP registers	|
12995		+---------------------------------------+
12996		| SPE alignment padding			|
12997		+---------------------------------------+
12998		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
12999		+---------------------------------------+
13000		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
13001		+---------------------------------------+
13002		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
13003		+---------------------------------------+
13004	old SP->| back chain to caller's caller		|
13005		+---------------------------------------+
13006
13007   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13008   given.  (But note below and in sysv4.h that we require only 8 and
13009   may round up the size of our stack frame anyways.  The historical
13010   reason is early versions of powerpc-linux which didn't properly
13011   align the stack at program startup.  A happy side-effect is that
13012   -mno-eabi libraries can be used with -meabi programs.)
13013
13014   The EABI configuration defaults to the V.4 layout.  However,
13015   the stack alignment requirements may differ.  If -mno-eabi is not
13016   given, the required stack alignment is 8 bytes; if -mno-eabi is
13017   given, the required alignment is 16 bytes.  (But see V.4 comment
13018   above.)  */
13019
13020#ifndef ABI_STACK_BOUNDARY
13021#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13022#endif
13023
/* Compute the stack-frame layout of the current function per the
   diagrams above and return it.  The result points at a single
   static buffer, so it is only valid until the next call.  */
static rs6000_stack_t *
rs6000_stack_info (void)
{
  static rs6000_stack_t info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int ehrd_size;
  int save_align;
  HOST_WIDE_INT non_fixed_size;

  memset (&info, 0, sizeof (info));

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      /* insn_chain_scanned_p: 0 = unknown, 1 = no 64-bit GPR use,
	 2 = uses 64-bit GPRs (hence the +1 / -1 encoding).  */
      if (cfun->machine->insn_chain_scanned_p == 0)
	cfun->machine->insn_chain_scanned_p
	  = spe_func_has_64bit_regs_p () + 1;
      info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
    }

  /* Select which calling sequence.  */
  info_ptr->abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always if but one register
     gets used in 64-bits.  Otherwise, all the registers in the frame
     get saved in 32-bits.

     So... since when we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if ((DEFAULT_ABI == ABI_AIX
       && current_function_profile
       && !TARGET_PROFILE_KERNEL)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
      || info_ptr->calls_p
      || rs6000_ra_ever_killed ())
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (DEFAULT_ABI == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers; the loop body is empty on
	 purpose -- I ends up as the register count.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
		       && info_ptr->spe_64bit_regs_used != 0
		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
					 TARGET_ALTIVEC ? 16 : 8);
  if (FRAME_GROWS_DOWNWARD)
    /* Fold any alignment slop of the fixed+vars+parm region into
       vars_size so the total stays ABI_STACK_BOUNDARY-aligned.  */
    info_ptr->vars_size
      += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
		       + info_ptr->parm_size,
		       ABI_STACK_BOUNDARY / BITS_PER_UNIT)
	 - (info_ptr->fixed_size + info_ptr->vars_size
	    + info_ptr->parm_size);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size  = 4;
  else
    info_ptr->vrsave_size  = 0;

  /* May overwrite vrsave_size/vrsave_mask and world_save_p.  */
  compute_save_world_info (info_ptr);

  /* Calculate the offsets.  */
  switch (DEFAULT_ABI)
    {
    case ABI_NONE:
    default:
      gcc_unreachable ();

    case ABI_AIX:
    case ABI_DARWIN:
      /* Offsets are negative, measured down from the old SP (see the
	 frame diagram above); FPRs sit at the top of the save area.  */
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.
	     The padding goes above the vectors.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = info_ptr->vrsave_save_offset & 0xF;
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;
	  gcc_assert (info_ptr->altivec_size == 0
		      || info_ptr->altivec_save_offset % 16 == 0);

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
      /* CR and LR live in the caller's frame at fixed positive
	 offsets under AIX/Darwin (see diagram).  */
      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset   = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
	}
      else
	info_ptr->ehrd_offset    = info_ptr->cr_save_offset;
      info_ptr->ehrd_offset      -= ehrd_size;
      info_ptr->lr_save_offset   = reg_size;
      break;
    }

  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
					 + info_ptr->gp_size
					 + info_ptr->altivec_size
					 + info_ptr->altivec_padding_size
					 + info_ptr->spe_gp_size
					 + info_ptr->spe_padding_size
					 + ehrd_size
					 + info_ptr->cr_size
					 + info_ptr->vrsave_size,
					 save_align);

  non_fixed_size	 = (info_ptr->vars_size
			    + info_ptr->parm_size
			    + info_ptr->save_size);

  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (DEFAULT_ABI == ABI_V4)
    info_ptr->push_p = non_fixed_size != 0;

  else if (frame_pointer_needed)
    info_ptr->push_p = 1;

  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
    info_ptr->push_p = 1;

  else
    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  return info_ptr;
}
13325
13326/* Return true if the current function uses any GPRs in 64-bit SIMD
13327   mode.  */
13328
13329static bool
13330spe_func_has_64bit_regs_p (void)
13331{
13332  rtx insns, insn;
13333
13334  /* Functions that save and restore all the call-saved registers will
13335     need to save/restore the registers in 64-bits.  */
13336  if (current_function_calls_eh_return
13337      || current_function_calls_setjmp
13338      || current_function_has_nonlocal_goto)
13339    return true;
13340
13341  insns = get_insns ();
13342
13343  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13344    {
13345      if (INSN_P (insn))
13346	{
13347	  rtx i;
13348
13349	  /* FIXME: This should be implemented with attributes...
13350
13351	         (set_attr "spe64" "true")....then,
13352	         if (get_spe64(insn)) return true;
13353
13354	     It's the only reliable way to do the stuff below.  */
13355
13356	  i = PATTERN (insn);
13357	  if (GET_CODE (i) == SET)
13358	    {
13359	      enum machine_mode mode = GET_MODE (SET_SRC (i));
13360
13361	      if (SPE_VECTOR_MODE (mode))
13362		return true;
13363	      if (TARGET_E500_DOUBLE && mode == DFmode)
13364		return true;
13365	    }
13366	}
13367    }
13368
13369  return false;
13370}
13371
/* Dump the frame layout INFO to stderr for debugging; if INFO is
   null, compute the current function's layout first.  Fields equal
   to their zero/default values (32 for first_gp_reg_save, 64 for
   first_fp_reg_save, 4 for reg_size) are omitted.  */
static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
13502
13503rtx
13504rs6000_return_addr (int count, rtx frame)
13505{
13506  /* Currently we don't optimize very well between prolog and body
13507     code and for PIC code the code can be actually quite bad, so
13508     don't try to be too clever here.  */
13509  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13510    {
13511      cfun->machine->ra_needs_full_frame = 1;
13512
13513      return
13514	gen_rtx_MEM
13515	  (Pmode,
13516	   memory_address
13517	   (Pmode,
13518	    plus_constant (copy_to_reg
13519			   (gen_rtx_MEM (Pmode,
13520					 memory_address (Pmode, frame))),
13521			   RETURN_ADDRESS_OFFSET)));
13522    }
13523
13524  cfun->machine->ra_need_lr = 1;
13525  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13526}
13527
13528/* Say whether a function is a candidate for sibcall handling or not.
13529   We do not allow indirect calls to be optimized into sibling calls.
13530   Also, we can't do it if there are any vector parameters; there's
13531   nowhere to put the VRsave code so it works; note that functions with
13532   vector parameters are required to have a prototype, so the argument
13533   type info must be available here.  (The tail recursion case can work
13534   with vector parameters, but there's no way to distinguish here.) */
13535static bool
13536rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13537{
13538  tree type;
13539  if (decl)
13540    {
13541      if (TARGET_ALTIVEC_VRSAVE)
13542	{
13543	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13544	       type; type = TREE_CHAIN (type))
13545	    {
13546	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13547		return false;
13548	    }
13549	}
13550      if (DEFAULT_ABI == ABI_DARWIN
13551	  || ((*targetm.binds_local_p) (decl)
13552	      && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
13553	{
13554	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13555
13556	  if (!lookup_attribute ("longcall", attr_list)
13557	      || lookup_attribute ("shortcall", attr_list))
13558	    return true;
13559	}
13560    }
13561  return false;
13562}
13563
13564/* NULL if INSN insn is valid within a low-overhead loop.
13565   Otherwise return why doloop cannot be applied.
13566   PowerPC uses the COUNT register for branch on table instructions.  */
13567
13568static const char *
13569rs6000_invalid_within_doloop (rtx insn)
13570{
13571  if (CALL_P (insn))
13572    return "Function call in the loop.";
13573
13574  if (JUMP_P (insn)
13575      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13576	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13577    return "Computed branch in the loop.";
13578
13579  return NULL;
13580}
13581
/* Return 1 if the link register may be clobbered by the body of the
   current function (by a non-sibling call, a REG_INC of LR, or an
   explicit set of LR outside the prologue/epilogue), so that LR
   must be saved in the stack frame; return 0 otherwise.  */
static int
rs6000_ra_ever_killed (void)
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Thunks are emitted specially and never need LR saved.  */
  if (current_function_is_thunk)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  /* Scan the outermost insn sequence even if we are currently
     emitting inside a nested one.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    {
	      /* Sibling calls deliberately do not count (see above).  */
	      if (!SIBLING_CALL_P (insn))
		return 1;
	    }
	  else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
    	}
    }
  return 0;
}
13631
13632/* Add a REG_MAYBE_DEAD note to the insn.  */
13633static void
13634rs6000_maybe_dead (rtx insn)
13635{
13636  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13637					const0_rtx,
13638					REG_NOTES (insn));
13639}
13640
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when emitting the prologue: then the hard LR
   (and r0 where needed) are used directly and every emitted insn gets
   a REG_MAYBE_DEAD note; otherwise fresh pseudos are used.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* Case 1: secure-PLT SVR4 PIC.  */
  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
    {
      char buf[30];
      rtx lab, tmp1, tmp2, got, tempLR;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      /* -fPIC addresses the TOC label; -fpic goes through the GOT
	 symbol.  */
      if (flag_pic == 2)
	got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
      else
	got = rs6000_got_sym ();
      tmp1 = tmp2 = dest;
      if (!fromprolog)
	{
	  tmp1 = gen_reg_rtx (Pmode);
	  tmp2 = gen_reg_rtx (Pmode);
	}
      tempLR = (fromprolog
		? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		: gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (tmp1, tempLR);
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* Case 2: SVR4 -fpic (small GOT model).  */
  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));

      insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, tempLR);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* Case 3: SVR4 -fPIC (large model): compute the TOC address from
     the distance between two local labels.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      if (fromprolog)
	{
	  rtx symF, symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Materialize the TOC address as high/low halves of the label.  */
      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    {
      /* Case 5: classic AIX ABI -- reload the TOC pointer from the
	 dedicated slot in the caller's frame.  */
      gcc_assert (DEFAULT_ABI == ABI_AIX);

      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
}
13764
13765/* Emit instructions to restore the link register after determining where
13766   its value has been stored.  */
13767
13768void
13769rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13770{
13771  rs6000_stack_t *info = rs6000_stack_info ();
13772  rtx operands[2];
13773
13774  operands[0] = source;
13775  operands[1] = scratch;
13776
13777  if (info->lr_save_p)
13778    {
13779      rtx frame_rtx = stack_pointer_rtx;
13780      HOST_WIDE_INT sp_offset = 0;
13781      rtx tmp;
13782
13783      if (frame_pointer_needed
13784	  || current_function_calls_alloca
13785	  || info->total_size > 32767)
13786	{
13787	  tmp = gen_frame_mem (Pmode, frame_rtx);
13788	  emit_move_insn (operands[1], tmp);
13789	  frame_rtx = operands[1];
13790	}
13791      else if (info->push_p)
13792	sp_offset = info->total_size;
13793
13794      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13795      tmp = gen_frame_mem (Pmode, tmp);
13796      emit_move_insn (tmp, operands[0]);
13797    }
13798  else
13799    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13800}
13801
13802static GTY(()) int set = -1;
13803
13804int
13805get_TOC_alias_set (void)
13806{
13807  if (set == -1)
13808    set = new_alias_set ();
13809  return set;
13810}
13811
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  /* Scan every insn for a PARALLEL containing (use (unspec ... UNSPEC_TOC)).  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;
      int i;

      if (!INSN_P (insn))
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx elt = XVECEXP (pat, 0, i);

	  if (GET_CODE (elt) != USE)
	    continue;
	  elt = XEXP (elt, 0);
	  if (GET_CODE (elt) == UNSPEC && XINT (elt, 1) == UNSPEC_TOC)
	    return 1;
	}
    }
  return 0;
}
#endif
13843
13844rtx
13845create_TOC_reference (rtx symbol)
13846{
13847  if (no_new_pseudos)
13848    regs_ever_live[TOC_REGISTER] = 1;
13849  return gen_rtx_PLUS (Pmode,
13850	   gen_rtx_REG (Pmode, TOC_REGISTER),
13851	     gen_rtx_CONST (Pmode,
13852	       gen_rtx_MINUS (Pmode, symbol,
13853		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13854}
13855
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.

   This works by inspecting the instruction at the caller's return
   address: if it is not the conventional TOC-restore insn emitted
   after a cross-TOC call, r2 has not been saved and we store it into
   its conventional slot ourselves.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  /* Follow the back chain to the caller's stack frame.  */
  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Load the saved LR (the return address) from two words into the
     caller's frame, then fetch the instruction at that address.  */
  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  /* 0x80410014 is "lwz r2,20(r1)"; 0xE8410028 is "ld r2,40(r1)" --
     the TOC-restore insns the compiler places after a call.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  /* No TOC restore found after the call: save r2 ourselves into its
     conventional slot, five words into the caller's frame (20(r1) on
     32-bit, 40(r1) on 64-bit).  */
  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
13891
13892/* This ties together stack memory (MEM with an alias set of frame_alias_set)
13893   and the change to the stack pointer.  */
13894
13895static void
13896rs6000_emit_stack_tie (void)
13897{
13898  rtx mem = gen_frame_mem (BLKmode,
13899			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13900
13901  emit_insn (gen_stack_tie (mem));
13902}
13903
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = gen_int_mode (-size, Pmode);

  /* If -SIZE does not survive the round trip through Pmode, the frame
     is unrepresentably large; trap rather than emit wrong code.  */
  if (INTVAL (todec) != -size)
    {
      warning (0, "stack frame too large");
      emit_insn (gen_trap ());
      return;
    }

  /* Stack-limit checking (-fstack-limit-*): trap if the decremented
     stack pointer would drop below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit in a register: compute limit + SIZE into r0 and
	     trap when sp is (unsigned) below it.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Symbolic limit: materialize limit + SIZE with a
	     high/low-part pair, then trap the same way.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning (0, "stack limit expression is not supported");
    }

  /* Copy the old sp to r12 when requested, or when we lack the update
     forms and must store the back chain by hand below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement does not fit a 16-bit immediate; load it
	     into r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn () == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Single update insn: decrement sp and store the old sp at the
	 new sp (the back chain word) in one operation.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_di_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: adjust sp, then store the saved old sp (r12)
	 as the back chain word.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Attach a note describing the net sp adjustment so the unwinder
     sees the same effect regardless of which sequence was emitted.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
13996
13997/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13998   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13999   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
14000   deduce these equivalences by itself so it wasn't necessary to hold
14001   its hand so much.  */
14002
14003static void
14004rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
14005		      rtx reg2, rtx rreg)
14006{
14007  rtx real, temp;
14008
14009  /* copy_rtx will not make unique copies of registers, so we need to
14010     ensure we don't have unwanted sharing here.  */
14011  if (reg == reg2)
14012    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14013
14014  if (reg == rreg)
14015    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14016
14017  real = copy_rtx (PATTERN (insn));
14018
14019  if (reg2 != NULL_RTX)
14020    real = replace_rtx (real, reg2, rreg);
14021
14022  real = replace_rtx (real, reg,
14023		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14024							STACK_POINTER_REGNUM),
14025				    GEN_INT (val)));
14026
14027  /* We expect that 'real' is either a SET or a PARALLEL containing
14028     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
14029     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
14030
14031  if (GET_CODE (real) == SET)
14032    {
14033      rtx set = real;
14034
14035      temp = simplify_rtx (SET_SRC (set));
14036      if (temp)
14037	SET_SRC (set) = temp;
14038      temp = simplify_rtx (SET_DEST (set));
14039      if (temp)
14040	SET_DEST (set) = temp;
14041      if (GET_CODE (SET_DEST (set)) == MEM)
14042	{
14043	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14044	  if (temp)
14045	    XEXP (SET_DEST (set), 0) = temp;
14046	}
14047    }
14048  else
14049    {
14050      int i;
14051
14052      gcc_assert (GET_CODE (real) == PARALLEL);
14053      for (i = 0; i < XVECLEN (real, 0); i++)
14054	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14055	  {
14056	    rtx set = XVECEXP (real, 0, i);
14057
14058	    temp = simplify_rtx (SET_SRC (set));
14059	    if (temp)
14060	      SET_SRC (set) = temp;
14061	    temp = simplify_rtx (SET_DEST (set));
14062	    if (temp)
14063	      SET_DEST (set) = temp;
14064	    if (GET_CODE (SET_DEST (set)) == MEM)
14065	      {
14066		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14067		if (temp)
14068		  XEXP (SET_DEST (set), 0) = temp;
14069	      }
14070	    RTX_FRAME_RELATED_P (set) = 1;
14071	  }
14072    }
14073
14074  if (TARGET_SPE)
14075    real = spe_synthesize_frame_save (real);
14076
14077  RTX_FRAME_RELATED_P (insn) = 1;
14078  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14079					real,
14080					REG_NOTES (insn));
14081}
14082
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) saves need the synthetic companion.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  gcc_assert (GET_CODE (SET_DEST (real)) == MEM
	      && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
	      && GET_CODE (SET_SRC (real)) == REG);

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* REAL2 is an SImode copy of the original save.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  /* SYNTH starts as a second copy; the offsets diverge below.  */
  synth = copy_rtx (real2);

  /* On big-endian the hard-register save takes the +4 word; on
     little-endian it keeps the base offset (the synthetic save gets
     +4 instead, below).  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  /* Retarget the synthetic save at register n+1200 ...  */
  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  /* ... covering whichever word the real save did not.  */
  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  /* Both halves are frame-related; order them so the base-offset save
     comes first in the PARALLEL.  */
  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
14144
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.  REG holds the new VRSAVE value, INFO
   supplies the mask of live AltiVec registers, and EPILOGUEP is
   nonzero when generating the epilogue sequence.  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  /* First element: the VRSAVE update itself, as an unspec_volatile so
     it cannot be deleted or reordered.  */
  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    UNSPECV_SET_VRSAVE));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

     	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs [i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    /* Call-saved register in an epilogue: per the comment
	       above, emit a set-from-self unspec (raw number 27)
	       instead of a clobber so the restore is not dead.  */
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  /* Pack the VRSAVE set plus all clobbers/uses into one PARALLEL.  */
  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
14203
14204/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14205   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */
14206
14207static void
14208emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
14209		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
14210{
14211  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14212  rtx replacea, replaceb;
14213
14214  int_rtx = GEN_INT (offset);
14215
14216  /* Some cases that need register indexed addressing.  */
14217  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
14218      || (TARGET_E500_DOUBLE && mode == DFmode)
14219      || (TARGET_SPE_ABI
14220	  && SPE_VECTOR_MODE (mode)
14221	  && !SPE_CONST_OFFSET_OK (offset)))
14222    {
14223      /* Whomever calls us must make sure r11 is available in the
14224	 flow path of instructions in the prologue.  */
14225      offset_rtx = gen_rtx_REG (Pmode, 11);
14226      emit_move_insn (offset_rtx, int_rtx);
14227
14228      replacea = offset_rtx;
14229      replaceb = int_rtx;
14230    }
14231  else
14232    {
14233      offset_rtx = int_rtx;
14234      replacea = NULL_RTX;
14235      replaceb = NULL_RTX;
14236    }
14237
14238  reg = gen_rtx_REG (mode, regno);
14239  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
14240  mem = gen_frame_mem (mode, addr);
14241
14242  insn = emit_move_insn (mem, reg);
14243
14244  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14245}
14246
14247/* Emit an offset memory reference suitable for a frame store, while
14248   converting to a valid addressing mode.  */
14249
14250static rtx
14251gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
14252{
14253  rtx int_rtx, offset_rtx;
14254
14255  int_rtx = GEN_INT (offset);
14256
14257  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14258      || (TARGET_E500_DOUBLE && mode == DFmode))
14259    {
14260      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14261      emit_move_insn (offset_rtx, int_rtx);
14262    }
14263  else
14264    offset_rtx = int_rtx;
14265
14266  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
14267}
14268
14269/* Look for user-defined global regs.  We should not save and restore these,
14270   and cannot use stmw/lmw if there are any in its range.  */
14271
14272static bool
14273no_global_regs_above (int first_greg)
14274{
14275  int i;
14276  for (i = 0; i < 32 - first_greg; i++)
14277    if (global_regs[first_greg + i])
14278      return false;
14279  return true;
14280}
14281
14282#ifndef TARGET_FIX_AND_CONTINUE
14283#define TARGET_FIX_AND_CONTINUE 0
14284#endif
14285
14286/* Emit function prologue as insns.  */
14287
14288void
14289rs6000_emit_prologue (void)
14290{
14291  rs6000_stack_t *info = rs6000_stack_info ();
14292  enum machine_mode reg_mode = Pmode;
14293  int reg_size = TARGET_32BIT ? 4 : 8;
14294  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14295  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14296  rtx frame_reg_rtx = sp_reg_rtx;
14297  rtx cr_save_rtx = NULL_RTX;
14298  rtx insn;
14299  int saving_FPRs_inline;
14300  int using_store_multiple;
14301  HOST_WIDE_INT sp_offset = 0;
14302
14303  if (TARGET_FIX_AND_CONTINUE)
14304    {
14305      /* gdb on darwin arranges to forward a function from the old
14306	 address by modifying the first 5 instructions of the function
14307	 to branch to the overriding function.  This is necessary to
14308	 permit function pointers that point to the old function to
14309	 actually forward to the new function.  */
14310      emit_insn (gen_nop ());
14311      emit_insn (gen_nop ());
14312      emit_insn (gen_nop ());
14313      emit_insn (gen_nop ());
14314      emit_insn (gen_nop ());
14315    }
14316
14317  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14318    {
14319      reg_mode = V2SImode;
14320      reg_size = 8;
14321    }
14322
14323  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14324			  && (!TARGET_SPE_ABI
14325			      || info->spe_64bit_regs_used == 0)
14326			  && info->first_gp_reg_save < 31
14327			  && no_global_regs_above (info->first_gp_reg_save));
14328  saving_FPRs_inline = (info->first_fp_reg_save == 64
14329			|| FP_SAVE_INLINE (info->first_fp_reg_save)
14330			|| current_function_calls_eh_return
14331			|| cfun->machine->ra_need_lr);
14332
14333  /* For V.4, update stack before we do any saving and set back pointer.  */
14334  if (! WORLD_SAVE_P (info)
14335      && info->push_p
14336      && (DEFAULT_ABI == ABI_V4
14337	  || current_function_calls_eh_return))
14338    {
14339      if (info->total_size < 32767)
14340	sp_offset = info->total_size;
14341      else
14342	frame_reg_rtx = frame_ptr_rtx;
14343      rs6000_emit_allocate_stack (info->total_size,
14344				  (frame_reg_rtx != sp_reg_rtx
14345				   && (info->cr_save_p
14346				       || info->lr_save_p
14347				       || info->first_fp_reg_save < 64
14348				       || info->first_gp_reg_save < 32
14349				       )));
14350      if (frame_reg_rtx != sp_reg_rtx)
14351	rs6000_emit_stack_tie ();
14352    }
14353
14354  /* Handle world saves specially here.  */
14355  if (WORLD_SAVE_P (info))
14356    {
14357      int i, j, sz;
14358      rtx treg;
14359      rtvec p;
14360      rtx reg0;
14361
14362      /* save_world expects lr in r0. */
14363      reg0 = gen_rtx_REG (Pmode, 0);
14364      if (info->lr_save_p)
14365	{
14366	  insn = emit_move_insn (reg0,
14367				 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14368	  RTX_FRAME_RELATED_P (insn) = 1;
14369	}
14370
14371      /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
14372	 assumptions about the offsets of various bits of the stack
14373	 frame.  */
14374      gcc_assert (info->gp_save_offset == -220
14375		  && info->fp_save_offset == -144
14376		  && info->lr_save_offset == 8
14377		  && info->cr_save_offset == 4
14378		  && info->push_p
14379		  && info->lr_save_p
14380		  && (!current_function_calls_eh_return
14381		       || info->ehrd_offset == -432)
14382		  && info->vrsave_save_offset == -224
14383		  && info->altivec_save_offset == -416);
14384
14385      treg = gen_rtx_REG (SImode, 11);
14386      emit_move_insn (treg, GEN_INT (-info->total_size));
14387
14388      /* SAVE_WORLD takes the caller's LR in R0 and the frame size
14389	 in R11.  It also clobbers R12, so beware!  */
14390
14391      /* Preserve CR2 for save_world prologues */
14392      sz = 5;
14393      sz += 32 - info->first_gp_reg_save;
14394      sz += 64 - info->first_fp_reg_save;
14395      sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14396      p = rtvec_alloc (sz);
14397      j = 0;
14398      RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
14399					    gen_rtx_REG (Pmode,
14400							 LINK_REGISTER_REGNUM));
14401      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14402					gen_rtx_SYMBOL_REF (Pmode,
14403							    "*save_world"));
14404      /* We do floats first so that the instruction pattern matches
14405	 properly.  */
14406      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14407	{
14408	  rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14409	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14410				   GEN_INT (info->fp_save_offset
14411					    + sp_offset + 8 * i));
14412	  rtx mem = gen_frame_mem (DFmode, addr);
14413
14414	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14415	}
14416      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14417	{
14418	  rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14419	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14420				   GEN_INT (info->altivec_save_offset
14421					    + sp_offset + 16 * i));
14422	  rtx mem = gen_frame_mem (V4SImode, addr);
14423
14424	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14425	}
14426      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14427	{
14428	  rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14429	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14430				   GEN_INT (info->gp_save_offset
14431					    + sp_offset + reg_size * i));
14432	  rtx mem = gen_frame_mem (reg_mode, addr);
14433
14434	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14435	}
14436
14437      {
14438	/* CR register traditionally saved as CR2.  */
14439	rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14440	rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14441				 GEN_INT (info->cr_save_offset
14442					  + sp_offset));
14443	rtx mem = gen_frame_mem (reg_mode, addr);
14444
14445	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14446      }
14447      /* Explain about use of R0.  */
14448      if (info->lr_save_p)
14449	{
14450	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14451				   GEN_INT (info->lr_save_offset
14452					    + sp_offset));
14453	  rtx mem = gen_frame_mem (reg_mode, addr);
14454
14455	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
14456	}
14457      /* Explain what happens to the stack pointer.  */
14458      {
14459	rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
14460	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
14461      }
14462
14463      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14464      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14465			    treg, GEN_INT (-info->total_size));
14466      sp_offset = info->total_size;
14467    }
14468
14469  /* If we use the link register, get it into r0.  */
14470  if (!WORLD_SAVE_P (info) && info->lr_save_p)
14471    {
14472      insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14473			     gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14474      RTX_FRAME_RELATED_P (insn) = 1;
14475    }
14476
14477  /* If we need to save CR, put it into r12.  */
14478  if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
14479    {
14480      rtx set;
14481
14482      cr_save_rtx = gen_rtx_REG (SImode, 12);
14483      insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14484      RTX_FRAME_RELATED_P (insn) = 1;
14485      /* Now, there's no way that dwarf2out_frame_debug_expr is going
14486	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14487	 But that's OK.  All we have to do is specify that _one_ condition
14488	 code register is saved in this stack slot.  The thrower's epilogue
14489	 will then restore all the call-saved registers.
14490	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
14491      set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14492			 gen_rtx_REG (SImode, CR2_REGNO));
14493      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14494					    set,
14495					    REG_NOTES (insn));
14496    }
14497
14498  /* Do any required saving of fpr's.  If only one or two to save, do
14499     it ourselves.  Otherwise, call function.  */
14500  if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
14501    {
14502      int i;
14503      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14504	if ((regs_ever_live[info->first_fp_reg_save+i]
14505	     && ! call_used_regs[info->first_fp_reg_save+i]))
14506	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14507			   info->first_fp_reg_save + i,
14508			   info->fp_save_offset + sp_offset + 8 * i,
14509			   info->total_size);
14510    }
14511  else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
14512    {
14513      int i;
14514      char rname[30];
14515      const char *alloc_rname;
14516      rtvec p;
14517      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
14518
14519      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14520					  gen_rtx_REG (Pmode,
14521						       LINK_REGISTER_REGNUM));
14522      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14523	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
14524      alloc_rname = ggc_strdup (rname);
14525      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14526				      gen_rtx_SYMBOL_REF (Pmode,
14527							  alloc_rname));
14528      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14529	{
14530	  rtx addr, reg, mem;
14531	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14532	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14533			       GEN_INT (info->fp_save_offset
14534					+ sp_offset + 8*i));
14535	  mem = gen_frame_mem (DFmode, addr);
14536
14537	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14538	}
14539      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14540      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14541			    NULL_RTX, NULL_RTX);
14542    }
14543
14544  /* Save GPRs.  This is done as a PARALLEL if we are using
14545     the store-multiple instructions.  */
14546  if (!WORLD_SAVE_P (info) && using_store_multiple)
14547    {
14548      rtvec p;
14549      int i;
14550      p = rtvec_alloc (32 - info->first_gp_reg_save);
14551      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14552	{
14553	  rtx addr, reg, mem;
14554	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14555	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14556			       GEN_INT (info->gp_save_offset
14557					+ sp_offset
14558					+ reg_size * i));
14559	  mem = gen_frame_mem (reg_mode, addr);
14560
14561	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14562	}
14563      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14564      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14565			    NULL_RTX, NULL_RTX);
14566    }
14567  else if (!WORLD_SAVE_P (info))
14568    {
14569      int i;
14570      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14571	if ((regs_ever_live[info->first_gp_reg_save + i]
14572	     && (!call_used_regs[info->first_gp_reg_save + i]
14573		 || (i + info->first_gp_reg_save
14574		     == RS6000_PIC_OFFSET_TABLE_REGNUM
14575		     && TARGET_TOC && TARGET_MINIMAL_TOC)))
14576	    || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14577		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14578		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14579	  {
14580	    rtx addr, reg, mem;
14581	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14582
14583	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14584	      {
14585		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14586		rtx b;
14587
14588		if (!SPE_CONST_OFFSET_OK (offset))
14589		  {
14590		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14591		    emit_move_insn (b, GEN_INT (offset));
14592		  }
14593		else
14594		  b = GEN_INT (offset);
14595
14596		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14597		mem = gen_frame_mem (V2SImode, addr);
14598		insn = emit_move_insn (mem, reg);
14599
14600		if (GET_CODE (b) == CONST_INT)
14601		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14602					NULL_RTX, NULL_RTX);
14603		else
14604		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14605					b, GEN_INT (offset));
14606	      }
14607	    else
14608	      {
14609		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14610				     GEN_INT (info->gp_save_offset
14611					      + sp_offset
14612					      + reg_size * i));
14613		mem = gen_frame_mem (reg_mode, addr);
14614
14615		insn = emit_move_insn (mem, reg);
14616		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14617				      NULL_RTX, NULL_RTX);
14618	      }
14619	  }
14620    }
14621
14622  /* ??? There's no need to emit actual instructions here, but it's the
14623     easiest way to get the frame unwind information emitted.  */
14624  if (current_function_calls_eh_return)
14625    {
14626      unsigned int i, regno;
14627
14628      /* In AIX ABI we need to pretend we save r2 here.  */
14629      if (TARGET_AIX)
14630	{
14631	  rtx addr, reg, mem;
14632
14633	  reg = gen_rtx_REG (reg_mode, 2);
14634	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14635			       GEN_INT (sp_offset + 5 * reg_size));
14636	  mem = gen_frame_mem (reg_mode, addr);
14637
14638	  insn = emit_move_insn (mem, reg);
14639	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14640				NULL_RTX, NULL_RTX);
14641	  PATTERN (insn) = gen_blockage ();
14642	}
14643
14644      for (i = 0; ; ++i)
14645	{
14646	  regno = EH_RETURN_DATA_REGNO (i);
14647	  if (regno == INVALID_REGNUM)
14648	    break;
14649
14650	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14651			   info->ehrd_offset + sp_offset
14652			   + reg_size * (int) i,
14653			   info->total_size);
14654	}
14655    }
14656
14657  /* Save lr if we used it.  */
14658  if (!WORLD_SAVE_P (info) && info->lr_save_p)
14659    {
14660      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14661			       GEN_INT (info->lr_save_offset + sp_offset));
14662      rtx reg = gen_rtx_REG (Pmode, 0);
14663      rtx mem = gen_rtx_MEM (Pmode, addr);
14664      /* This should not be of frame_alias_set, because of
14665	 __builtin_return_address.  */
14666
14667      insn = emit_move_insn (mem, reg);
14668      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14669			    NULL_RTX, NULL_RTX);
14670    }
14671
14672  /* Save CR if we use any that must be preserved.  */
14673  if (!WORLD_SAVE_P (info) && info->cr_save_p)
14674    {
14675      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14676			       GEN_INT (info->cr_save_offset + sp_offset));
14677      rtx mem = gen_frame_mem (SImode, addr);
14678      /* See the large comment above about why CR2_REGNO is used.  */
14679      rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
14680
14681      /* If r12 was used to hold the original sp, copy cr into r0 now
14682	 that it's free.  */
14683      if (REGNO (frame_reg_rtx) == 12)
14684	{
14685	  rtx set;
14686
14687	  cr_save_rtx = gen_rtx_REG (SImode, 0);
14688	  insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14689	  RTX_FRAME_RELATED_P (insn) = 1;
14690	  set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14691	  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14692						set,
14693						REG_NOTES (insn));
14694
14695	}
14696      insn = emit_move_insn (mem, cr_save_rtx);
14697
14698      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14699			    NULL_RTX, NULL_RTX);
14700    }
14701
14702  /* Update stack and set back pointer unless this is V.4,
14703     for which it was done previously.  */
14704  if (!WORLD_SAVE_P (info) && info->push_p
14705      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
14706    {
14707      rs6000_emit_allocate_stack (info->total_size, FALSE);
14708      sp_offset = info->total_size;
14709    }
14710
14711  /* Set frame pointer, if needed.  */
14712  if (frame_pointer_needed)
14713    {
14714      insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
14715			     sp_reg_rtx);
14716      RTX_FRAME_RELATED_P (insn) = 1;
14717    }
14718
14719  /* Save AltiVec registers if needed.  Save here because the red zone does
14720     not include AltiVec registers.  */
14721  if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14722    {
14723      int i;
14724
14725      /* There should be a non inline version of this, for when we
14726         are saving lots of vector registers.  */
14727      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14728        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14729          {
14730            rtx areg, savereg, mem;
14731            int offset;
14732
14733            offset = info->altivec_save_offset + sp_offset
14734              + 16 * (i - info->first_altivec_reg_save);
14735
14736            savereg = gen_rtx_REG (V4SImode, i);
14737
14738            areg = gen_rtx_REG (Pmode, 0);
14739            emit_move_insn (areg, GEN_INT (offset));
14740
14741            /* AltiVec addressing mode is [reg+reg].  */
14742            mem = gen_frame_mem (V4SImode,
14743                                 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
14744
14745            insn = emit_move_insn (mem, savereg);
14746
14747            rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14748                                  areg, GEN_INT (offset));
14749          }
14750    }
14751
14752  /* VRSAVE is a bit vector representing which AltiVec registers
14753     are used.  The OS uses this to determine which vector
14754     registers to save on a context switch.  We need to save
14755     VRSAVE on the stack frame, add whatever AltiVec registers we
14756     used in this function, and do the corresponding magic in the
14757     epilogue.  */
14758
14759  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14760      && info->vrsave_mask != 0)
14761    {
14762      rtx reg, mem, vrsave;
14763      int offset;
14764
14765      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
14766         as frame_reg_rtx and r11 as the static chain pointer for
14767         nested functions.  */
14768      reg = gen_rtx_REG (SImode, 0);
14769      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14770      if (TARGET_MACHO)
14771        emit_insn (gen_get_vrsave_internal (reg));
14772      else
14773        emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
14774
14775      if (!WORLD_SAVE_P (info))
14776        {
14777          /* Save VRSAVE.  */
14778          offset = info->vrsave_save_offset + sp_offset;
14779          mem = gen_frame_mem (SImode,
14780                               gen_rtx_PLUS (Pmode, frame_reg_rtx,
14781                                             GEN_INT (offset)));
14782          insn = emit_move_insn (mem, reg);
14783        }
14784
14785      /* Include the registers in the mask.  */
14786      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14787
14788      insn = emit_insn (generate_set_vrsave (reg, info, 0));
14789    }
14790
14791  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
14792  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
14793      || (DEFAULT_ABI == ABI_V4
14794	  && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
14795	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
14796    {
14797      /* If emit_load_toc_table will use the link register, we need to save
14798	 it.  We use R12 for this purpose because emit_load_toc_table
14799	 can use register 0.  This allows us to use a plain 'blr' to return
14800	 from the procedure more often.  */
14801      int save_LR_around_toc_setup = (TARGET_ELF
14802				      && DEFAULT_ABI != ABI_AIX
14803				      && flag_pic
14804				      && ! info->lr_save_p
14805				      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14806      if (save_LR_around_toc_setup)
14807	{
14808	  rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14809
14810	  insn = emit_move_insn (frame_ptr_rtx, lr);
14811	  rs6000_maybe_dead (insn);
14812	  RTX_FRAME_RELATED_P (insn) = 1;
14813
14814	  rs6000_emit_load_toc_table (TRUE);
14815
14816	  insn = emit_move_insn (lr, frame_ptr_rtx);
14817	  rs6000_maybe_dead (insn);
14818	  RTX_FRAME_RELATED_P (insn) = 1;
14819	}
14820      else
14821	rs6000_emit_load_toc_table (TRUE);
14822    }
14823
14824#if TARGET_MACHO
14825  if (DEFAULT_ABI == ABI_DARWIN
14826      && flag_pic && current_function_uses_pic_offset_table)
14827    {
14828      rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14829      rtx src = machopic_function_base_sym ();
14830
14831      /* Save and restore LR locally around this call (in R0).  */
14832      if (!info->lr_save_p)
14833	rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14834
14835      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14836
14837      insn = emit_move_insn (gen_rtx_REG (Pmode,
14838					  RS6000_PIC_OFFSET_TABLE_REGNUM),
14839			     lr);
14840      rs6000_maybe_dead (insn);
14841
14842      if (!info->lr_save_p)
14843	rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
14844    }
14845#endif
14846}
14847
14848/* Write function prologue.  */
14849
14850static void
14851rs6000_output_function_prologue (FILE *file,
14852				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14853{
14854  rs6000_stack_t *info = rs6000_stack_info ();
14855
14856  if (TARGET_DEBUG_STACK)
14857    debug_stack_info (info);
14858
14859  /* Write .extern for any function we will call to save and restore
14860     fp values.  */
14861  if (info->first_fp_reg_save < 64
14862      && !FP_SAVE_INLINE (info->first_fp_reg_save))
14863    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14864	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14865	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14866	     RESTORE_FP_SUFFIX);
14867
14868  /* Write .extern for AIX common mode routines, if needed.  */
14869  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14870    {
14871      fputs ("\t.extern __mulh\n", file);
14872      fputs ("\t.extern __mull\n", file);
14873      fputs ("\t.extern __divss\n", file);
14874      fputs ("\t.extern __divus\n", file);
14875      fputs ("\t.extern __quoss\n", file);
14876      fputs ("\t.extern __quous\n", file);
14877      common_mode_defined = 1;
14878    }
14879
14880  if (! HAVE_prologue)
14881    {
14882      start_sequence ();
14883
14884      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14885	 the "toplevel" insn chain.  */
14886      emit_note (NOTE_INSN_DELETED);
14887      rs6000_emit_prologue ();
14888      emit_note (NOTE_INSN_DELETED);
14889
14890      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
14891      {
14892	rtx insn;
14893	unsigned addr = 0;
14894	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14895	  {
14896	    INSN_ADDRESSES_NEW (insn, addr);
14897	    addr += 4;
14898	  }
14899      }
14900
14901      if (TARGET_DEBUG_STACK)
14902	debug_rtx_list (get_insns (), 100);
14903      final (get_insns (), file, FALSE);
14904      end_sequence ();
14905    }
14906
14907  rs6000_pic_labelno++;
14908}
14909
14910/* Emit function epilogue as insns.
14911
14912   At present, dwarf2out_frame_debug_expr doesn't understand
14913   register restores, so we don't bother setting RTX_FRAME_RELATED_P
14914   anywhere in the epilogue.  Most of the insns below would in any case
14915   need special notes to explain where r11 is in relation to the stack.  */
14916
void
rs6000_emit_epilogue (int sibcall)
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;	/* Restore FPRs with loads vs. helper call.  */
  int using_load_multiple;	/* Restore GPRs with a single lmw PARALLEL.  */
  int using_mfcr_multiple;	/* Split CR restore into per-field mtcrf's.  */
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  /* Register the restore addresses are formed from; starts as r1 and may
     be switched to r11 below for the V.4 backchain case.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int i;

  info = rs6000_stack_info ();

  /* Under the SPE ABI, GPRs may hold 64-bit values that were saved as
     V2SImode; restore them in that mode and size.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31
			 && no_global_regs_above (info->first_gp_reg_save));
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* "World save" (Darwin): emit one PARALLEL jump that branches to the
     rest_world helper, which restores everything and returns to the
     location in the LR save slot.  */
  if (WORLD_SAVE_P (info))
    {
      int i, j;
      char rname[30];
      const char *alloc_rname;
      rtvec p;

      /* eh_rest_world_r10 will return to the location saved in the LR
	 stack slot (which is not likely to be our caller.)
	 Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
	 rest_world is similar, except any R10 parameter is ignored.
	 The exception-handling stuff that was here in 2.95 is no
	 longer necessary.  */

      p = rtvec_alloc (9
		       + 1
		       + 32 - info->first_gp_reg_save
		       + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
		       + 63 + 1 - info->first_fp_reg_save);

      strcpy (rname, ((current_function_calls_eh_return) ?
		      "*eh_rest_world_r10" : "*rest_world"));
      alloc_rname = ggc_strdup (rname);

      j = 0;
      RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
					gen_rtx_REG (Pmode,
						     LINK_REGISTER_REGNUM));
      RTVEC_ELT (p, j++)
	= gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
      /* The instruction pattern requires a clobber here;
	 it is shared with the restVEC helper. */
      RTVEC_ELT (p, j++)
	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));

      {
	/* CR register traditionally saved as CR2.  */
	rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
	rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->cr_save_offset));
	rtx mem = gen_frame_mem (reg_mode, addr);

	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
      }

      /* Describe the GPR, AltiVec, and FPR restores the helper performs,
	 so the PARALLEL reflects every register it rewrites.  */
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + reg_size * i));
	  rtx mem = gen_frame_mem (reg_mode, addr);

	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
	}
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
	{
	  rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->altivec_save_offset
					    + 16 * i));
	  rtx mem = gen_frame_mem (V4SImode, addr);

	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
	}
      for (i = 0; info->first_fp_reg_save + i <= 63; i++)
	{
	  rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->fp_save_offset
					    + 8 * i));
	  rtx mem = gen_frame_mem (DFmode, addr);

	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
	}
      /* Scratch registers the helper clobbers, plus the R10 stack
	 adjustment input (see the comment above).  */
      RTVEC_ELT (p, j++)
	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
      RTVEC_ELT (p, j++)
	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
      RTVEC_ELT (p, j++)
	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
      RTVEC_ELT (p, j++)
	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
      RTVEC_ELT (p, j++)
	= gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return;
    }

  /* Set sp_offset based on the stack push from the prologue.  */
  if ((DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return)
      && info->total_size < 32767)
    sp_offset = info->total_size;

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 is used as a temporary to materialize the offset.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_frame_mem (V4SImode, addr);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_frame_mem (SImode, addr);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Reset for the main restore sequence; it is recomputed below for the
     non-backchain case.  */
  sp_offset = 0;

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));
    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return)
	sp_offset = info->total_size;
      else
	{
	  /* Pop the frame now; saved registers are addressed off the
	     restored sp with zero sp_offset.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Get the old lr if we saved it.  Loaded into r0 first so the mtlr
     below can overlap with the remaining restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      if (TARGET_AIX)
	{
	  /* Reload r2 (TOC) from the slot the prologue stored it in
	     (sp_offset + 5 * reg_size; see the matching prologue code).  */
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (sp_offset + 5 * reg_size));
	  rtx mem = gen_frame_mem (reg_mode, addr);

	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
	}

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_frame_mem (reg_mode, addr);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore individually each GPR that was live and
       call-saved, plus the PIC offset table register when the ABI/PIC
       settings mean the prologue saved it.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save + i]
	   && (!call_used_regs[info->first_gp_reg_save + i]
	       || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
	  || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_frame_mem (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE memory ops only take a small constant offset; fall
		 back to a scratch register when it doesn't fit.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_frame_mem (V2SImode, addr);
	    }

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_frame_mem (DFmode, addr);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.
     The saved value was loaded into r12 above.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  gcc_assert (count);
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* Restore all used CR fields with one PARALLEL; each SET moves
	     one field from r12 using the field's mask bit.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  gcc_assert (ndx == count);
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  */
  if (frame_reg_rtx != sp_reg_rtx)
    {
      /* This blockage is needed so that sched doesn't decide to move
	 the sp change before the register restores.  */
      rs6000_emit_stack_tie ();
      emit_move_insn (sp_reg_rtx, frame_reg_rtx);
    }
  else if (sp_offset != 0)
    emit_insn (TARGET_32BIT
	       ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
			     GEN_INT (sp_offset))
	       : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
			     GEN_INT (sp_offset)));

  /* For __builtin_eh_return, apply the stack adjustment computed by the
     exception machinery.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself unless this is a sibling call, in which case
     the sibcall sequence supplies the jump.  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_frame_mem (DFmode, addr);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
15368
15369/* Write function epilogue.  */
15370
15371static void
15372rs6000_output_function_epilogue (FILE *file,
15373				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15374{
15375  if (! HAVE_epilogue)
15376    {
15377      rtx insn = get_last_insn ();
15378      /* If the last insn was a BARRIER, we don't have to write anything except
15379	 the trace table.  */
15380      if (GET_CODE (insn) == NOTE)
15381	insn = prev_nonnote_insn (insn);
15382      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
15383	{
15384	  /* This is slightly ugly, but at least we don't have two
15385	     copies of the epilogue-emitting code.  */
15386	  start_sequence ();
15387
15388	  /* A NOTE_INSN_DELETED is supposed to be at the start
15389	     and end of the "toplevel" insn chain.  */
15390	  emit_note (NOTE_INSN_DELETED);
15391	  rs6000_emit_epilogue (FALSE);
15392	  emit_note (NOTE_INSN_DELETED);
15393
15394	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
15395	  {
15396	    rtx insn;
15397	    unsigned addr = 0;
15398	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15399	      {
15400		INSN_ADDRESSES_NEW (insn, addr);
15401		addr += 4;
15402	      }
15403	  }
15404
15405	  if (TARGET_DEBUG_STACK)
15406	    debug_rtx_list (get_insns (), 100);
15407	  final (get_insns (), file, FALSE);
15408	  end_sequence ();
15409	}
15410    }
15411
15412#if TARGET_MACHO
15413  macho_branch_islands ();
15414  /* Mach-O doesn't support labels at the end of objects, so if
15415     it looks like we might want one, insert a NOP.  */
15416  {
15417    rtx insn = get_last_insn ();
15418    while (insn
15419	   && NOTE_P (insn)
15420	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15421      insn = PREV_INSN (insn);
15422    if (insn
15423	&& (LABEL_P (insn)
15424	    || (NOTE_P (insn)
15425		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15426      fputs ("\tnop\n", file);
15427  }
15428#endif
15429
15430  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
15431     on its format.
15432
15433     We don't output a traceback table if -finhibit-size-directive was
15434     used.  The documentation for -finhibit-size-directive reads
15435     ``don't output a @code{.size} assembler directive, or anything
15436     else that would cause trouble if the function is split in the
15437     middle, and the two halves are placed at locations far apart in
15438     memory.''  The traceback table has this property, since it
15439     includes the offset from the start of the function to the
15440     traceback table itself.
15441
15442     System V.4 Powerpc's (and the embedded ABI derived from it) use a
15443     different traceback table.  */
15444  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15445      && rs6000_traceback != traceback_none && !current_function_is_thunk)
15446    {
15447      const char *fname = NULL;
15448      const char *language_string = lang_hooks.name;
15449      int fixed_parms = 0, float_parms = 0, parm_info = 0;
15450      int i;
15451      int optional_tbtab;
15452      rs6000_stack_t *info = rs6000_stack_info ();
15453
15454      if (rs6000_traceback == traceback_full)
15455	optional_tbtab = 1;
15456      else if (rs6000_traceback == traceback_part)
15457	optional_tbtab = 0;
15458      else
15459	optional_tbtab = !optimize_size && !TARGET_ELF;
15460
15461      if (optional_tbtab)
15462	{
15463	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15464	  while (*fname == '.')	/* V.4 encodes . in the name */
15465	    fname++;
15466
15467	  /* Need label immediately before tbtab, so we can compute
15468	     its offset from the function start.  */
15469	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15470	  ASM_OUTPUT_LABEL (file, fname);
15471	}
15472
15473      /* The .tbtab pseudo-op can only be used for the first eight
15474	 expressions, since it can't handle the possibly variable
15475	 length fields that follow.  However, if you omit the optional
15476	 fields, the assembler outputs zeros for all optional fields
15477	 anyways, giving each variable length field is minimum length
15478	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
15479	 pseudo-op at all.  */
15480
15481      /* An all-zero word flags the start of the tbtab, for debuggers
15482	 that have to find it by searching forward from the entry
15483	 point or from the current pc.  */
15484      fputs ("\t.long 0\n", file);
15485
15486      /* Tbtab format type.  Use format type 0.  */
15487      fputs ("\t.byte 0,", file);
15488
15489      /* Language type.  Unfortunately, there does not seem to be any
15490	 official way to discover the language being compiled, so we
15491	 use language_string.
15492	 C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
15493	 Java is 13.  Objective-C is 14.  Objective-C++ isn't assigned
15494	 a number, so for now use 9.  */
15495      if (! strcmp (language_string, "GNU C"))
15496	i = 0;
15497      else if (! strcmp (language_string, "GNU F77")
15498	       || ! strcmp (language_string, "GNU F95"))
15499	i = 1;
15500      else if (! strcmp (language_string, "GNU Pascal"))
15501	i = 2;
15502      else if (! strcmp (language_string, "GNU Ada"))
15503	i = 3;
15504      else if (! strcmp (language_string, "GNU C++")
15505	       || ! strcmp (language_string, "GNU Objective-C++"))
15506	i = 9;
15507      else if (! strcmp (language_string, "GNU Java"))
15508	i = 13;
15509      else if (! strcmp (language_string, "GNU Objective-C"))
15510	i = 14;
15511      else
15512	gcc_unreachable ();
15513      fprintf (file, "%d,", i);
15514
15515      /* 8 single bit fields: global linkage (not set for C extern linkage,
15516	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15517	 from start of procedure stored in tbtab, internal function, function
15518	 has controlled storage, function has no toc, function uses fp,
15519	 function logs/aborts fp operations.  */
15520      /* Assume that fp operations are used if any fp reg must be saved.  */
15521      fprintf (file, "%d,",
15522	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15523
15524      /* 6 bitfields: function is interrupt handler, name present in
15525	 proc table, function calls alloca, on condition directives
15526	 (controls stack walks, 3 bits), saves condition reg, saves
15527	 link reg.  */
15528      /* The `function calls alloca' bit seems to be set whenever reg 31 is
15529	 set up as a frame pointer, even when there is no alloca call.  */
15530      fprintf (file, "%d,",
15531	       ((optional_tbtab << 6)
15532		| ((optional_tbtab & frame_pointer_needed) << 5)
15533		| (info->cr_save_p << 1)
15534		| (info->lr_save_p)));
15535
15536      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15537	 (6 bits).  */
15538      fprintf (file, "%d,",
15539	       (info->push_p << 7) | (64 - info->first_fp_reg_save));
15540
15541      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
15542      fprintf (file, "%d,", (32 - first_reg_to_save ()));
15543
15544      if (optional_tbtab)
15545	{
15546	  /* Compute the parameter info from the function decl argument
15547	     list.  */
15548	  tree decl;
15549	  int next_parm_info_bit = 31;
15550
15551	  for (decl = DECL_ARGUMENTS (current_function_decl);
15552	       decl; decl = TREE_CHAIN (decl))
15553	    {
15554	      rtx parameter = DECL_INCOMING_RTL (decl);
15555	      enum machine_mode mode = GET_MODE (parameter);
15556
15557	      if (GET_CODE (parameter) == REG)
15558		{
15559		  if (SCALAR_FLOAT_MODE_P (mode))
15560		    {
15561		      int bits;
15562
15563		      float_parms++;
15564
15565		      switch (mode)
15566			{
15567			case SFmode:
15568			  bits = 0x2;
15569			  break;
15570
15571			case DFmode:
15572			case TFmode:
15573			  bits = 0x3;
15574			  break;
15575
15576			default:
15577			  gcc_unreachable ();
15578			}
15579
15580		      /* If only one bit will fit, don't or in this entry.  */
15581		      if (next_parm_info_bit > 0)
15582			parm_info |= (bits << (next_parm_info_bit - 1));
15583		      next_parm_info_bit -= 2;
15584		    }
15585		  else
15586		    {
15587		      fixed_parms += ((GET_MODE_SIZE (mode)
15588				       + (UNITS_PER_WORD - 1))
15589				      / UNITS_PER_WORD);
15590		      next_parm_info_bit -= 1;
15591		    }
15592		}
15593	    }
15594	}
15595
15596      /* Number of fixed point parameters.  */
15597      /* This is actually the number of words of fixed point parameters; thus
15598	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
15599      fprintf (file, "%d,", fixed_parms);
15600
15601      /* 2 bitfields: number of floating point parameters (7 bits), parameters
15602	 all on stack.  */
15603      /* This is actually the number of fp registers that hold parameters;
15604	 and thus the maximum value is 13.  */
15605      /* Set parameters on stack bit if parameters are not in their original
15606	 registers, regardless of whether they are on the stack?  Xlc
15607	 seems to set the bit when not optimizing.  */
15608      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15609
15610      if (! optional_tbtab)
15611	return;
15612
15613      /* Optional fields follow.  Some are variable length.  */
15614
15615      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15616	 11 double float.  */
15617      /* There is an entry for each parameter in a register, in the order that
15618	 they occur in the parameter list.  Any intervening arguments on the
15619	 stack are ignored.  If the list overflows a long (max possible length
15620	 34 bits) then completely leave off all elements that don't fit.  */
15621      /* Only emit this long if there was at least one parameter.  */
15622      if (fixed_parms || float_parms)
15623	fprintf (file, "\t.long %d\n", parm_info);
15624
15625      /* Offset from start of code to tb table.  */
15626      fputs ("\t.long ", file);
15627      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15628      if (TARGET_AIX)
15629	RS6000_OUTPUT_BASENAME (file, fname);
15630      else
15631	assemble_name (file, fname);
15632      putc ('-', file);
15633      rs6000_output_function_entry (file, fname);
15634      putc ('\n', file);
15635
15636      /* Interrupt handler mask.  */
15637      /* Omit this long, since we never set the interrupt handler bit
15638	 above.  */
15639
15640      /* Number of CTL (controlled storage) anchors.  */
15641      /* Omit this long, since the has_ctl bit is never set above.  */
15642
15643      /* Displacement into stack of each CTL anchor.  */
15644      /* Omit this list of longs, because there are no CTL anchors.  */
15645
15646      /* Length of function name.  */
15647      if (*fname == '*')
15648	++fname;
15649      fprintf (file, "\t.short %d\n", (int) strlen (fname));
15650
15651      /* Function name.  */
15652      assemble_string (fname, strlen (fname));
15653
15654      /* Register for alloca automatic storage; this is always reg 31.
15655	 Only emit this if the alloca bit was set above.  */
15656      if (frame_pointer_needed)
15657	fputs ("\t.byte 31\n", file);
15658
15659      fputs ("\t.align 2\n", file);
15660    }
15661}
15662
15663/* A C compound statement that outputs the assembler code for a thunk
15664   function, used to implement C++ virtual function calls with
15665   multiple inheritance.  The thunk acts as a wrapper around a virtual
15666   function, adjusting the implicit object parameter before handing
15667   control off to the real function.
15668
15669   First, emit code to add the integer DELTA to the location that
15670   contains the incoming first argument.  Assume that this argument
15671   contains a pointer, and is the one used to pass the `this' pointer
15672   in C++.  This is the incoming argument *before* the function
15673   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
15674   values of all other incoming arguments.
15675
15676   After the addition, emit code to jump to FUNCTION, which is a
15677   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
15678   not touch the return address.  Hence returning from FUNCTION will
15679   return to whoever called the current `thunk'.
15680
15681   The effect must be as if FUNCTION had been called directly with the
15682   adjusted first argument.  This macro is responsible for emitting
15683   all of the code for a thunk function; output_function_prologue()
15684   and output_function_epilogue() are not invoked.
15685
15686   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
15687   been extracted from it.)  It might possibly be useful on some
15688   targets, but probably not.
15689
15690   If you do not define this macro, the target-independent code in the
15691   C++ frontend will generate a less efficient heavyweight thunk that
15692   calls FUNCTION instead of jumping to it.  The generic approach does
15693   not support varargs.  */
15694
15695static void
15696rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15697			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
15698			tree function)
15699{
15700  rtx this, insn, funexp;
15701
15702  reload_completed = 1;
15703  epilogue_completed = 1;
15704  no_new_pseudos = 1;
15705  reset_block_changes ();
15706
15707  /* Mark the end of the (empty) prologue.  */
15708  emit_note (NOTE_INSN_PROLOGUE_END);
15709
15710  /* Find the "this" pointer.  If the function returns a structure,
15711     the structure return pointer is in r3.  */
15712  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
15713    this = gen_rtx_REG (Pmode, 4);
15714  else
15715    this = gen_rtx_REG (Pmode, 3);
15716
15717  /* Apply the constant offset, if required.  */
15718  if (delta)
15719    {
15720      rtx delta_rtx = GEN_INT (delta);
15721      emit_insn (TARGET_32BIT
15722		 ? gen_addsi3 (this, this, delta_rtx)
15723		 : gen_adddi3 (this, this, delta_rtx));
15724    }
15725
15726  /* Apply the offset from the vtable, if required.  */
15727  if (vcall_offset)
15728    {
15729      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15730      rtx tmp = gen_rtx_REG (Pmode, 12);
15731
15732      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
15733      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15734	{
15735	  emit_insn (TARGET_32BIT
15736		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15737		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15738	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15739	}
15740      else
15741	{
15742	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15743
15744	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15745	}
15746      emit_insn (TARGET_32BIT
15747		 ? gen_addsi3 (this, this, tmp)
15748		 : gen_adddi3 (this, this, tmp));
15749    }
15750
15751  /* Generate a tail call to the target function.  */
15752  if (!TREE_USED (function))
15753    {
15754      assemble_external (function);
15755      TREE_USED (function) = 1;
15756    }
15757  funexp = XEXP (DECL_RTL (function), 0);
15758  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
15759
15760#if TARGET_MACHO
15761  if (MACHOPIC_INDIRECT)
15762    funexp = machopic_indirect_call_target (funexp);
15763#endif
15764
15765  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
15766     generate sibcall RTL explicitly.  */
15767  insn = emit_call_insn (
15768	   gen_rtx_PARALLEL (VOIDmode,
15769	     gen_rtvec (4,
15770			gen_rtx_CALL (VOIDmode,
15771				      funexp, const0_rtx),
15772			gen_rtx_USE (VOIDmode, const0_rtx),
15773			gen_rtx_USE (VOIDmode,
15774				     gen_rtx_REG (SImode,
15775						  LINK_REGISTER_REGNUM)),
15776			gen_rtx_RETURN (VOIDmode))));
15777  SIBLING_CALL_P (insn) = 1;
15778  emit_barrier ();
15779
15780  /* Run just enough of rest_of_compilation to get the insns emitted.
15781     There's not really enough bulk here to make other passes such as
15782     instruction scheduling worth while.  Note that use_thunk calls
15783     assemble_start_function and assemble_end_function.  */
15784  insn = get_insns ();
15785  insn_locators_initialize ();
15786  shorten_branches (insn);
15787  final_start_function (insn, file, 1);
15788  final (insn, file, 1);
15789  final_end_function ();
15790
15791  reload_completed = 0;
15792  epilogue_completed = 0;
15793  no_new_pseudos = 0;
15794}
15795
15796/* A quick summary of the various types of 'constant-pool tables'
15797   under PowerPC:
15798
15799   Target	Flags		Name		One table per
15800   AIX		(none)		AIX TOC		object file
15801   AIX		-mfull-toc	AIX TOC		object file
15802   AIX		-mminimal-toc	AIX minimal TOC	translation unit
15803   SVR4/EABI	(none)		SVR4 SDATA	object file
15804   SVR4/EABI	-fpic		SVR4 pic	object file
15805   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
15806   SVR4/EABI	-mrelocatable	EABI TOC	function
15807   SVR4/EABI	-maix		AIX TOC		object file
15808   SVR4/EABI	-maix -mminimal-toc
15809				AIX minimal TOC	translation unit
15810
15811   Name			Reg.	Set by	entries	      contains:
15812					made by	 addrs?	fp?	sum?
15813
15814   AIX TOC		2	crt0	as	 Y	option	option
15815   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
15816   SVR4 SDATA		13	crt0	gcc	 N	Y	N
15817   SVR4 pic		30	prolog	ld	 Y	not yet	N
15818   SVR4 PIC		30	prolog	gcc	 Y	option	option
15819   EABI TOC		30	prolog	gcc	 Y	option	option
15820
15821*/
15822
15823/* Hash functions for the hash table.  */
15824
15825static unsigned
15826rs6000_hash_constant (rtx k)
15827{
15828  enum rtx_code code = GET_CODE (k);
15829  enum machine_mode mode = GET_MODE (k);
15830  unsigned result = (code << 3) ^ mode;
15831  const char *format;
15832  int flen, fidx;
15833
15834  format = GET_RTX_FORMAT (code);
15835  flen = strlen (format);
15836  fidx = 0;
15837
15838  switch (code)
15839    {
15840    case LABEL_REF:
15841      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15842
15843    case CONST_DOUBLE:
15844      if (mode != VOIDmode)
15845	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15846      flen = 2;
15847      break;
15848
15849    case CODE_LABEL:
15850      fidx = 3;
15851      break;
15852
15853    default:
15854      break;
15855    }
15856
15857  for (; fidx < flen; fidx++)
15858    switch (format[fidx])
15859      {
15860      case 's':
15861	{
15862	  unsigned i, len;
15863	  const char *str = XSTR (k, fidx);
15864	  len = strlen (str);
15865	  result = result * 613 + len;
15866	  for (i = 0; i < len; i++)
15867	    result = result * 613 + (unsigned) str[i];
15868	  break;
15869	}
15870      case 'u':
15871      case 'e':
15872	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15873	break;
15874      case 'i':
15875      case 'n':
15876	result = result * 613 + (unsigned) XINT (k, fidx);
15877	break;
15878      case 'w':
15879	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15880	  result = result * 613 + (unsigned) XWINT (k, fidx);
15881	else
15882	  {
15883	    size_t i;
15884	    for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15885	      result = result * 613 + (unsigned) (XWINT (k, fidx)
15886						  >> CHAR_BIT * i);
15887	  }
15888	break;
15889      case '0':
15890	break;
15891      default:
15892	gcc_unreachable ();
15893      }
15894
15895  return result;
15896}
15897
15898static unsigned
15899toc_hash_function (const void *hash_entry)
15900{
15901  const struct toc_hash_struct *thc =
15902    (const struct toc_hash_struct *) hash_entry;
15903  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15904}
15905
15906/* Compare H1 and H2 for equivalence.  */
15907
15908static int
15909toc_hash_eq (const void *h1, const void *h2)
15910{
15911  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15912  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15913
15914  if (((const struct toc_hash_struct *) h1)->key_mode
15915      != ((const struct toc_hash_struct *) h2)->key_mode)
15916    return 0;
15917
15918  return rtx_equal_p (r1, r2);
15919}
15920
15921/* These are the names given by the C++ front-end to vtables, and
15922   vtable-like objects.  Ideally, this logic should not be here;
15923   instead, there should be some programmatic way of inquiring as
15924   to whether or not an object is a vtable.  */
15925
/* Test whether NAME is the mangled name of a vtable or vtable-like
   object: old-style vtable (_vt.), vtable (_ZTV), VTT (_ZTT),
   typeinfo (_ZTI), or construction vtable (_ZTC).  The macro argument
   is used directly (and parenthesized); the previous definition
   referred to a variable literally named `name', which only worked
   because every call site happened to have such a local in scope.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
15932
15933void
15934rs6000_output_symbol_ref (FILE *file, rtx x)
15935{
15936  /* Currently C++ toc references to vtables can be emitted before it
15937     is decided whether the vtable is public or private.  If this is
15938     the case, then the linker will eventually complain that there is
15939     a reference to an unknown section.  Thus, for vtables only,
15940     we emit the TOC reference to reference the symbol and not the
15941     section.  */
15942  const char *name = XSTR (x, 0);
15943
15944  if (VTABLE_NAME_P (name))
15945    {
15946      RS6000_OUTPUT_BASENAME (file, name);
15947    }
15948  else
15949    assemble_name (file, name);
15950}
15951
15952/* Output a TOC entry.  We derive the entry name from what is being
15953   written.  */
15954
15955void
15956output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15957{
15958  char buf[256];
15959  const char *name = buf;
15960  const char *real_name;
15961  rtx base = x;
15962  HOST_WIDE_INT offset = 0;
15963
15964  gcc_assert (!TARGET_NO_TOC);
15965
15966  /* When the linker won't eliminate them, don't output duplicate
15967     TOC entries (this happens on AIX if there is any kind of TOC,
15968     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
15969     CODE_LABELs.  */
15970  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15971    {
15972      struct toc_hash_struct *h;
15973      void * * found;
15974
15975      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
15976	 time because GGC is not initialized at that point.  */
15977      if (toc_hash_table == NULL)
15978	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15979					  toc_hash_eq, NULL);
15980
15981      h = ggc_alloc (sizeof (*h));
15982      h->key = x;
15983      h->key_mode = mode;
15984      h->labelno = labelno;
15985
15986      found = htab_find_slot (toc_hash_table, h, 1);
15987      if (*found == NULL)
15988	*found = h;
15989      else  /* This is indeed a duplicate.
15990	       Set this label equal to that label.  */
15991	{
15992	  fputs ("\t.set ", file);
15993	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15994	  fprintf (file, "%d,", labelno);
15995	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15996	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15997					      found)->labelno));
15998	  return;
15999	}
16000    }
16001
16002  /* If we're going to put a double constant in the TOC, make sure it's
16003     aligned properly when strict alignment is on.  */
16004  if (GET_CODE (x) == CONST_DOUBLE
16005      && STRICT_ALIGNMENT
16006      && GET_MODE_BITSIZE (mode) >= 64
16007      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
16008    ASM_OUTPUT_ALIGN (file, 3);
16009  }
16010
16011  (*targetm.asm_out.internal_label) (file, "LC", labelno);
16012
16013  /* Handle FP constants specially.  Note that if we have a minimal
16014     TOC, things we put here aren't actually in the TOC, so we can allow
16015     FP constants.  */
16016  if (GET_CODE (x) == CONST_DOUBLE &&
16017      (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
16018    {
16019      REAL_VALUE_TYPE rv;
16020      long k[4];
16021
16022      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16023      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16024	REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16025      else
16026	REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
16027
16028      if (TARGET_64BIT)
16029	{
16030	  if (TARGET_MINIMAL_TOC)
16031	    fputs (DOUBLE_INT_ASM_OP, file);
16032	  else
16033	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16034		     k[0] & 0xffffffff, k[1] & 0xffffffff,
16035		     k[2] & 0xffffffff, k[3] & 0xffffffff);
16036	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16037		   k[0] & 0xffffffff, k[1] & 0xffffffff,
16038		   k[2] & 0xffffffff, k[3] & 0xffffffff);
16039	  return;
16040	}
16041      else
16042	{
16043	  if (TARGET_MINIMAL_TOC)
16044	    fputs ("\t.long ", file);
16045	  else
16046	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16047		     k[0] & 0xffffffff, k[1] & 0xffffffff,
16048		     k[2] & 0xffffffff, k[3] & 0xffffffff);
16049	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16050		   k[0] & 0xffffffff, k[1] & 0xffffffff,
16051		   k[2] & 0xffffffff, k[3] & 0xffffffff);
16052	  return;
16053	}
16054    }
16055  else if (GET_CODE (x) == CONST_DOUBLE &&
16056	   (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
16057    {
16058      REAL_VALUE_TYPE rv;
16059      long k[2];
16060
16061      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16062
16063      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16064	REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16065      else
16066	REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
16067
16068      if (TARGET_64BIT)
16069	{
16070	  if (TARGET_MINIMAL_TOC)
16071	    fputs (DOUBLE_INT_ASM_OP, file);
16072	  else
16073	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16074		     k[0] & 0xffffffff, k[1] & 0xffffffff);
16075	  fprintf (file, "0x%lx%08lx\n",
16076		   k[0] & 0xffffffff, k[1] & 0xffffffff);
16077	  return;
16078	}
16079      else
16080	{
16081	  if (TARGET_MINIMAL_TOC)
16082	    fputs ("\t.long ", file);
16083	  else
16084	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16085		     k[0] & 0xffffffff, k[1] & 0xffffffff);
16086	  fprintf (file, "0x%lx,0x%lx\n",
16087		   k[0] & 0xffffffff, k[1] & 0xffffffff);
16088	  return;
16089	}
16090    }
16091  else if (GET_CODE (x) == CONST_DOUBLE &&
16092	   (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
16093    {
16094      REAL_VALUE_TYPE rv;
16095      long l;
16096
16097      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16098      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16099	REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16100      else
16101	REAL_VALUE_TO_TARGET_SINGLE (rv, l);
16102
16103      if (TARGET_64BIT)
16104	{
16105	  if (TARGET_MINIMAL_TOC)
16106	    fputs (DOUBLE_INT_ASM_OP, file);
16107	  else
16108	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16109	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
16110	  return;
16111	}
16112      else
16113	{
16114	  if (TARGET_MINIMAL_TOC)
16115	    fputs ("\t.long ", file);
16116	  else
16117	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16118	  fprintf (file, "0x%lx\n", l & 0xffffffff);
16119	  return;
16120	}
16121    }
16122  else if (GET_MODE (x) == VOIDmode
16123	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
16124    {
16125      unsigned HOST_WIDE_INT low;
16126      HOST_WIDE_INT high;
16127
16128      if (GET_CODE (x) == CONST_DOUBLE)
16129	{
16130	  low = CONST_DOUBLE_LOW (x);
16131	  high = CONST_DOUBLE_HIGH (x);
16132	}
16133      else
16134#if HOST_BITS_PER_WIDE_INT == 32
16135	{
16136	  low = INTVAL (x);
16137	  high = (low & 0x80000000) ? ~0 : 0;
16138	}
16139#else
16140	{
16141	  low = INTVAL (x) & 0xffffffff;
16142	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
16143	}
16144#endif
16145
16146      /* TOC entries are always Pmode-sized, but since this
16147	 is a bigendian machine then if we're putting smaller
16148	 integer constants in the TOC we have to pad them.
16149	 (This is still a win over putting the constants in
16150	 a separate constant pool, because then we'd have
16151	 to have both a TOC entry _and_ the actual constant.)
16152
16153	 For a 32-bit target, CONST_INT values are loaded and shifted
16154	 entirely within `low' and can be stored in one TOC entry.  */
16155
16156      /* It would be easy to make this work, but it doesn't now.  */
16157      gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
16158
16159      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
16160	{
16161#if HOST_BITS_PER_WIDE_INT == 32
16162	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16163			 POINTER_SIZE, &low, &high, 0);
16164#else
16165	  low |= high << 32;
16166	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16167	  high = (HOST_WIDE_INT) low >> 32;
16168	  low &= 0xffffffff;
16169#endif
16170	}
16171
16172      if (TARGET_64BIT)
16173	{
16174	  if (TARGET_MINIMAL_TOC)
16175	    fputs (DOUBLE_INT_ASM_OP, file);
16176	  else
16177	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16178		     (long) high & 0xffffffff, (long) low & 0xffffffff);
16179	  fprintf (file, "0x%lx%08lx\n",
16180		   (long) high & 0xffffffff, (long) low & 0xffffffff);
16181	  return;
16182	}
16183      else
16184	{
16185	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16186	    {
16187	      if (TARGET_MINIMAL_TOC)
16188		fputs ("\t.long ", file);
16189	      else
16190		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16191			 (long) high & 0xffffffff, (long) low & 0xffffffff);
16192	      fprintf (file, "0x%lx,0x%lx\n",
16193		       (long) high & 0xffffffff, (long) low & 0xffffffff);
16194	    }
16195	  else
16196	    {
16197	      if (TARGET_MINIMAL_TOC)
16198		fputs ("\t.long ", file);
16199	      else
16200		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16201	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
16202	    }
16203	  return;
16204	}
16205    }
16206
16207  if (GET_CODE (x) == CONST)
16208    {
16209      gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
16210
16211      base = XEXP (XEXP (x, 0), 0);
16212      offset = INTVAL (XEXP (XEXP (x, 0), 1));
16213    }
16214
16215  switch (GET_CODE (base))
16216    {
16217    case SYMBOL_REF:
16218      name = XSTR (base, 0);
16219      break;
16220
16221    case LABEL_REF:
16222      ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16223				   CODE_LABEL_NUMBER (XEXP (base, 0)));
16224      break;
16225
16226    case CODE_LABEL:
16227      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16228      break;
16229
16230    default:
16231      gcc_unreachable ();
16232    }
16233
16234  real_name = (*targetm.strip_name_encoding) (name);
16235  if (TARGET_MINIMAL_TOC)
16236    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
16237  else
16238    {
16239      fprintf (file, "\t.tc %s", real_name);
16240
16241      if (offset < 0)
16242	fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
16243      else if (offset)
16244	fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
16245
16246      fputs ("[TC],", file);
16247    }
16248
16249  /* Currently C++ toc references to vtables can be emitted before it
16250     is decided whether the vtable is public or private.  If this is
16251     the case, then the linker will eventually complain that there is
16252     a TOC reference to an unknown section.  Thus, for vtables only,
16253     we emit the TOC reference to reference the symbol and not the
16254     section.  */
16255  if (VTABLE_NAME_P (name))
16256    {
16257      RS6000_OUTPUT_BASENAME (file, name);
16258      if (offset < 0)
16259	fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
16260      else if (offset > 0)
16261	fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
16262    }
16263  else
16264    output_addr_const (file, x);
16265  putc ('\n', file);
16266}
16267
16268/* Output an assembler pseudo-op to write an ASCII string of N characters
16269   starting at P to FILE.
16270
16271   On the RS/6000, we have to do this using the .byte operation and
16272   write out special characters outside the quoted string.
16273   Also, the assembler is broken; very long strings are truncated,
16274   so we must artificially break them up early.  */
16275
void
output_ascii (FILE *file, const char *p, int n)
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  /* State machine over the three prefix/suffix strings:
     FOR_STRING is the text due before the next printable character
     (non-NULL only when no quoted string is currently open);
     FOR_DECIMAL is the text due before the next byte emitted in
     decimal; TO_CLOSE is whatever is needed to finish the current
     output line, or NULL if nothing is pending.  COUNT_STRING counts
     characters in the currently-open quoted string.  */
  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      /* Printable ASCII (space through 0176) goes inside a quoted
	 string; everything else is emitted as a decimal .byte.  */
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* The assembler truncates very long strings (see the comment
	     above this function), so break the string well early.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
16334
16335/* Generate a unique section name for FILENAME for a section type
16336   represented by SECTION_DESC.  Output goes into BUF.
16337
16338   SECTION_DESC can be any string, as long as it is different for each
16339   possible section type.
16340
16341   We name the section in the same manner as xlc.  The name begins with an
16342   underscore followed by the filename (after stripping any leading directory
16343   names) with the last period replaced by the string SECTION_DESC.  If
16344   FILENAME does not contain a period, SECTION_DESC is appended to the end of
16345   the name.  */
16346
void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *scan, *base, *dot = 0;
  char *out;
  int total;

  /* Locate the basename (text past the last '/') and the last period
     anywhere in FILENAME.  */
  base = filename;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* Worst case: '_' + basename + SECTION_DESC + terminating NUL.  */
  total = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (total);

  out = *buf;
  *out++ = '_';

  /* Copy the alphanumeric characters of the basename, substituting
     SECTION_DESC for the final period and everything after it.  */
  for (scan = base; *scan; scan++)
    {
      if (scan == dot)
	{
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  /* No period at all: append SECTION_DESC; otherwise just terminate.
     (A period that occurred only in a directory name leaves the name
     unterminated by the loop above, handled here too.)  */
  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
16388
16389/* Emit profile function.  */
16390
void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Non-standard profiling for kernels, which just saves LR then calls
     _mcount without worrying about arg saves.  The idea is to change
     the function prologue as little as possible as it isn't easy to
     account for arg save/restore code added just for _mcount.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      /* AIX: call _mcount, passing the address of the per-function
	 counter label "LPn" unless counters are disabled.  */
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  /* BUF is a stack buffer; ggc_strdup copies it so the
	     SYMBOL_REF's name survives this function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT
	  && current_function_uses_pic_offset_table)
	caller_addr_regno = 0;
#endif
      /* Darwin: _mcount receives the caller's return address as its
	 argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
16442
16443/* Write function profiler code.  */
16444
void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_V4:
      /* 32-bit SVR4 only: save LR at 4(r1), and (unless counters are
	 disabled) materialize the address of the local counter label
	 "LPn" in r0 before calling _mcount.  How that address is
	 formed depends on the PIC model in effect.  */
      if (!TARGET_32BIT)
	{
	  warning (0, "no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (NO_PROFILE_COUNTERS)
	{
	  /* No counter: just save LR.  */
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	}
      else if (TARGET_SECURE_PLT && flag_pic)
	{
	  /* Secure PLT: compute the label address PC-relatively,
	     using bcl to fetch the current address into LR.  */
	  asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{cau|addis} %s,%s,",
		       reg_names[12], reg_names[12]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
	}
      else if (flag_pic == 1)
	{
	  /* -fpic: load the counter label's address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: embed an inline constant word holding the label's
	     offset from the bcl landing point, then add that offset to
	     the PC recovered via mflr.  */
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbcl 20,31,1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the absolute label address with @ha/@l.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling (64-bit only): save LR at 16(r1), keep
	     the static chain register alive across the call if the
	     function uses one, and call _mcount directly.  */
	  gcc_assert (!TARGET_32BIT);

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (cfun->static_chain_decl != NULL)
	    {
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
16548
16549
16550/* Power4 load update and store update instructions are cracked into a
16551   load or store and an integer insn which are executed in the same cycle.
16552   Branches have their own dispatch slot which does not count against the
16553   GCC issue rate, but it changes the program flow so there are no other
16554   instructions to issue in this cycle.  */
16555
16556static int
16557rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16558		       int verbose ATTRIBUTE_UNUSED,
16559		       rtx insn, int more)
16560{
16561  if (GET_CODE (PATTERN (insn)) == USE
16562      || GET_CODE (PATTERN (insn)) == CLOBBER)
16563    return more;
16564
16565  if (rs6000_sched_groups)
16566    {
16567      if (is_microcoded_insn (insn))
16568	return 0;
16569      else if (is_cracked_insn (insn))
16570	return more > 2 ? more - 2 : 0;
16571    }
16572
16573  return more - 1;
16574}
16575
16576/* Adjust the cost of a scheduling dependency.  Return the new cost of
16577   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
16578
16579static int
16580rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16581{
16582  if (! recog_memoized (insn))
16583    return 0;
16584
16585  if (REG_NOTE_KIND (link) != 0)
16586    return 0;
16587
16588  if (REG_NOTE_KIND (link) == 0)
16589    {
16590      /* Data dependency; DEP_INSN writes a register that INSN reads
16591	 some cycles later.  */
16592
16593      /* Separate a load from a narrower, dependent store.  */
16594      if (rs6000_sched_groups
16595	  && GET_CODE (PATTERN (insn)) == SET
16596	  && GET_CODE (PATTERN (dep_insn)) == SET
16597	  && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16598	  && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16599	  && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16600	      > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16601	return cost + 14;
16602
16603      switch (get_attr_type (insn))
16604	{
16605	case TYPE_JMPREG:
16606	  /* Tell the first scheduling pass about the latency between
16607	     a mtctr and bctr (and mtlr and br/blr).  The first
16608	     scheduling pass will not know about this latency since
16609	     the mtctr instruction, which has the latency associated
16610	     to it, will be generated by reload.  */
16611	  return TARGET_POWER ? 5 : 4;
16612	case TYPE_BRANCH:
16613	  /* Leave some extra cycles between a compare and its
16614	     dependent branch, to inhibit expensive mispredicts.  */
16615	  if ((rs6000_cpu_attr == CPU_PPC603
16616	       || rs6000_cpu_attr == CPU_PPC604
16617	       || rs6000_cpu_attr == CPU_PPC604E
16618	       || rs6000_cpu_attr == CPU_PPC620
16619	       || rs6000_cpu_attr == CPU_PPC630
16620	       || rs6000_cpu_attr == CPU_PPC750
16621	       || rs6000_cpu_attr == CPU_PPC7400
16622	       || rs6000_cpu_attr == CPU_PPC7450
16623	       || rs6000_cpu_attr == CPU_POWER4
16624	       || rs6000_cpu_attr == CPU_POWER5)
16625	      && recog_memoized (dep_insn)
16626	      && (INSN_CODE (dep_insn) >= 0)
16627	      && (get_attr_type (dep_insn) == TYPE_CMP
16628		  || get_attr_type (dep_insn) == TYPE_COMPARE
16629		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16630		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16631		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16632		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16633		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16634		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16635	    return cost + 2;
16636	default:
16637	  break;
16638	}
16639      /* Fall out to return default cost.  */
16640    }
16641
16642  return cost;
16643}
16644
/* Return true if INSN is microcoded.
   Return false otherwise.  */
16647
16648static bool
16649is_microcoded_insn (rtx insn)
16650{
16651  if (!insn || !INSN_P (insn)
16652      || GET_CODE (PATTERN (insn)) == USE
16653      || GET_CODE (PATTERN (insn)) == CLOBBER)
16654    return false;
16655
16656  if (rs6000_sched_groups)
16657    {
16658      enum attr_type type = get_attr_type (insn);
16659      if (type == TYPE_LOAD_EXT_U
16660	  || type == TYPE_LOAD_EXT_UX
16661	  || type == TYPE_LOAD_UX
16662	  || type == TYPE_STORE_UX
16663	  || type == TYPE_MFCR)
16664	return true;
16665    }
16666
16667  return false;
16668}
16669
16670/* The function returns a nonzero value if INSN can be scheduled only
16671   as the first insn in a dispatch group ("dispatch-slot restricted").
16672   In this case, the returned value indicates how many dispatch slots
16673   the insn occupies (at the beginning of the group).
16674   Return 0 otherwise.  */
16675
16676static int
16677is_dispatch_slot_restricted (rtx insn)
16678{
16679  enum attr_type type;
16680
16681  if (!rs6000_sched_groups)
16682    return 0;
16683
16684  if (!insn
16685      || insn == NULL_RTX
16686      || GET_CODE (insn) == NOTE
16687      || GET_CODE (PATTERN (insn)) == USE
16688      || GET_CODE (PATTERN (insn)) == CLOBBER)
16689    return 0;
16690
16691  type = get_attr_type (insn);
16692
16693  switch (type)
16694    {
16695    case TYPE_MFCR:
16696    case TYPE_MFCRF:
16697    case TYPE_MTCR:
16698    case TYPE_DELAYED_CR:
16699    case TYPE_CR_LOGICAL:
16700    case TYPE_MTJMPR:
16701    case TYPE_MFJMPR:
16702      return 1;
16703    case TYPE_IDIV:
16704    case TYPE_LDIV:
16705      return 2;
16706    case TYPE_LOAD_L:
16707    case TYPE_STORE_C:
16708    case TYPE_ISYNC:
16709    case TYPE_SYNC:
16710      return 4;
16711    default:
16712      if (rs6000_cpu == PROCESSOR_POWER5
16713	  && is_cracked_insn (insn))
16714	return 2;
16715      return 0;
16716    }
16717}
16718
16719/* The function returns true if INSN is cracked into 2 instructions
16720   by the processor (and therefore occupies 2 issue slots).  */
16721
16722static bool
16723is_cracked_insn (rtx insn)
16724{
16725  if (!insn || !INSN_P (insn)
16726      || GET_CODE (PATTERN (insn)) == USE
16727      || GET_CODE (PATTERN (insn)) == CLOBBER)
16728    return false;
16729
16730  if (rs6000_sched_groups)
16731    {
16732      enum attr_type type = get_attr_type (insn);
16733      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16734	  || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16735	  || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16736	  || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16737	  || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16738	  || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16739	  || type == TYPE_IDIV || type == TYPE_LDIV
16740	  || type == TYPE_INSERT_WORD)
16741	return true;
16742    }
16743
16744  return false;
16745}
16746
16747/* The function returns true if INSN can be issued only from
16748   the branch slot.  */
16749
16750static bool
16751is_branch_slot_insn (rtx insn)
16752{
16753  if (!insn || !INSN_P (insn)
16754      || GET_CODE (PATTERN (insn)) == USE
16755      || GET_CODE (PATTERN (insn)) == CLOBBER)
16756    return false;
16757
16758  if (rs6000_sched_groups)
16759    {
16760      enum attr_type type = get_attr_type (insn);
16761      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16762	return true;
16763      return false;
16764    }
16765
16766  return false;
16767}
16768
16769/* A C statement (sans semicolon) to update the integer scheduling
16770   priority INSN_PRIORITY (INSN). Increase the priority to execute the
16771   INSN earlier, reduce the priority to execute INSN later.  Do not
16772   define this macro if you do not need to adjust the scheduling
16773   priorities of insns.  */
16774
16775static int
16776rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16777{
16778  /* On machines (like the 750) which have asymmetric integer units,
16779     where one integer unit can do multiply and divides and the other
16780     can't, reduce the priority of multiply/divide so it is scheduled
16781     before other integer operations.  */
16782
16783#if 0
16784  if (! INSN_P (insn))
16785    return priority;
16786
16787  if (GET_CODE (PATTERN (insn)) == USE)
16788    return priority;
16789
16790  switch (rs6000_cpu_attr) {
16791  case CPU_PPC750:
16792    switch (get_attr_type (insn))
16793      {
16794      default:
16795	break;
16796
16797      case TYPE_IMUL:
16798      case TYPE_IDIV:
16799	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16800		 priority, priority);
16801	if (priority >= 0 && priority < 0x01000000)
16802	  priority >>= 3;
16803	break;
16804      }
16805  }
16806#endif
16807
16808  if (is_dispatch_slot_restricted (insn)
16809      && reload_completed
16810      && current_sched_info->sched_max_insns_priority
16811      && rs6000_sched_restricted_insns_priority)
16812    {
16813
16814      /* Prioritize insns that can be dispatched only in the first
16815	 dispatch slot.  */
16816      if (rs6000_sched_restricted_insns_priority == 1)
16817	/* Attach highest priority to insn. This means that in
16818	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16819	   precede 'priority' (critical path) considerations.  */
16820	return current_sched_info->sched_max_insns_priority;
16821      else if (rs6000_sched_restricted_insns_priority == 2)
16822	/* Increase priority of insn by a minimal amount. This means that in
16823	   haifa-sched.c:ready_sort(), only 'priority' (critical path)
16824	   considerations precede dispatch-slot restriction considerations.  */
16825	return (priority + 1);
16826    }
16827
16828  return priority;
16829}
16830
16831/* Return how many instructions the machine can issue per cycle.  */
16832
16833static int
16834rs6000_issue_rate (void)
16835{
16836  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
16837  if (!reload_completed)
16838    return 1;
16839
16840  switch (rs6000_cpu_attr) {
16841  case CPU_RIOS1:  /* ? */
16842  case CPU_RS64A:
16843  case CPU_PPC601: /* ? */
16844  case CPU_PPC7450:
16845    return 3;
16846  case CPU_PPC440:
16847  case CPU_PPC603:
16848  case CPU_PPC750:
16849  case CPU_PPC7400:
16850  case CPU_PPC8540:
16851    return 2;
16852  case CPU_RIOS2:
16853  case CPU_PPC604:
16854  case CPU_PPC604E:
16855  case CPU_PPC620:
16856  case CPU_PPC630:
16857    return 4;
16858  case CPU_POWER4:
16859  case CPU_POWER5:
16860    return 5;
16861  default:
16862    return 1;
16863  }
16864}
16865
16866/* Return how many instructions to look ahead for better insn
16867   scheduling.  */
16868
16869static int
16870rs6000_use_sched_lookahead (void)
16871{
16872  if (rs6000_cpu_attr == CPU_PPC8540)
16873    return 4;
16874  return 0;
16875}
16876
/* Determine if PAT refers to memory.  */
16878
16879static bool
16880is_mem_ref (rtx pat)
16881{
16882  const char * fmt;
16883  int i, j;
16884  bool ret = false;
16885
16886  if (GET_CODE (pat) == MEM)
16887    return true;
16888
16889  /* Recursively process the pattern.  */
16890  fmt = GET_RTX_FORMAT (GET_CODE (pat));
16891
16892  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16893    {
16894      if (fmt[i] == 'e')
16895	ret |= is_mem_ref (XEXP (pat, i));
16896      else if (fmt[i] == 'E')
16897	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16898	  ret |= is_mem_ref (XVECEXP (pat, i, j));
16899    }
16900
16901  return ret;
16902}
16903
16904/* Determine if PAT is a PATTERN of a load insn.  */
16905
16906static bool
16907is_load_insn1 (rtx pat)
16908{
16909  if (!pat || pat == NULL_RTX)
16910    return false;
16911
16912  if (GET_CODE (pat) == SET)
16913    return is_mem_ref (SET_SRC (pat));
16914
16915  if (GET_CODE (pat) == PARALLEL)
16916    {
16917      int i;
16918
16919      for (i = 0; i < XVECLEN (pat, 0); i++)
16920	if (is_load_insn1 (XVECEXP (pat, 0, i)))
16921	  return true;
16922    }
16923
16924  return false;
16925}
16926
16927/* Determine if INSN loads from memory.  */
16928
16929static bool
16930is_load_insn (rtx insn)
16931{
16932  if (!insn || !INSN_P (insn))
16933    return false;
16934
16935  if (GET_CODE (insn) == CALL_INSN)
16936    return false;
16937
16938  return is_load_insn1 (PATTERN (insn));
16939}
16940
16941/* Determine if PAT is a PATTERN of a store insn.  */
16942
16943static bool
16944is_store_insn1 (rtx pat)
16945{
16946  if (!pat || pat == NULL_RTX)
16947    return false;
16948
16949  if (GET_CODE (pat) == SET)
16950    return is_mem_ref (SET_DEST (pat));
16951
16952  if (GET_CODE (pat) == PARALLEL)
16953    {
16954      int i;
16955
16956      for (i = 0; i < XVECLEN (pat, 0); i++)
16957	if (is_store_insn1 (XVECEXP (pat, 0, i)))
16958	  return true;
16959    }
16960
16961  return false;
16962}
16963
16964/* Determine if INSN stores to memory.  */
16965
16966static bool
16967is_store_insn (rtx insn)
16968{
16969  if (!insn || !INSN_P (insn))
16970    return false;
16971
16972  return is_store_insn1 (PATTERN (insn));
16973}
16974
16975/* Returns whether the dependence between INSN and NEXT is considered
16976   costly by the given target.  */
16977
16978static bool
16979rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16980			     int distance)
16981{
16982  /* If the flag is not enabled - no dependence is considered costly;
16983     allow all dependent insns in the same group.
16984     This is the most aggressive option.  */
16985  if (rs6000_sched_costly_dep == no_dep_costly)
16986    return false;
16987
16988  /* If the flag is set to 1 - a dependence is always considered costly;
16989     do not allow dependent instructions in the same group.
16990     This is the most conservative option.  */
16991  if (rs6000_sched_costly_dep == all_deps_costly)
16992    return true;
16993
16994  if (rs6000_sched_costly_dep == store_to_load_dep_costly
16995      && is_load_insn (next)
16996      && is_store_insn (insn))
16997    /* Prevent load after store in the same group.  */
16998    return true;
16999
17000  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
17001      && is_load_insn (next)
17002      && is_store_insn (insn)
17003      && (!link || (int) REG_NOTE_KIND (link) == 0))
17004     /* Prevent load after store in the same group if it is a true
17005	dependence.  */
17006     return true;
17007
17008  /* The flag is set to X; dependences with latency >= X are considered costly,
17009     and will not be scheduled in the same group.  */
17010  if (rs6000_sched_costly_dep <= max_dep_latency
17011      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
17012    return true;
17013
17014  return false;
17015}
17016
17017/* Return the next insn after INSN that is found before TAIL is reached,
17018   skipping any "non-active" insns - insns that will not actually occupy
17019   an issue slot.  Return NULL_RTX if such an insn is not found.  */
17020
17021static rtx
17022get_next_active_insn (rtx insn, rtx tail)
17023{
17024  if (insn == NULL_RTX || insn == tail)
17025    return NULL_RTX;
17026
17027  while (1)
17028    {
17029      insn = NEXT_INSN (insn);
17030      if (insn == NULL_RTX || insn == tail)
17031	return NULL_RTX;
17032
17033      if (CALL_P (insn)
17034	  || JUMP_P (insn)
17035	  || (NONJUMP_INSN_P (insn)
17036	      && GET_CODE (PATTERN (insn)) != USE
17037	      && GET_CODE (PATTERN (insn)) != CLOBBER
17038	      && INSN_CODE (insn) != CODE_FOR_stack_tie))
17039	break;
17040    }
17041  return insn;
17042}
17043
17044/* Return whether the presence of INSN causes a dispatch group termination
17045   of group WHICH_GROUP.
17046
17047   If WHICH_GROUP == current_group, this function will return true if INSN
17048   causes the termination of the current group (i.e, the dispatch group to
17049   which INSN belongs). This means that INSN will be the last insn in the
17050   group it belongs to.
17051
17052   If WHICH_GROUP == previous_group, this function will return true if INSN
17053   causes the termination of the previous group (i.e, the dispatch group that
17054   precedes the group to which INSN belongs).  This means that INSN will be
17055   the first insn in the group it belongs to).  */
17056
17057static bool
17058insn_terminates_group_p (rtx insn, enum group_termination which_group)
17059{
17060  enum attr_type type;
17061
17062  if (! insn)
17063    return false;
17064
17065  type = get_attr_type (insn);
17066
17067  if (is_microcoded_insn (insn))
17068    return true;
17069
17070  if (which_group == current_group)
17071    {
17072      if (is_branch_slot_insn (insn))
17073	return true;
17074      return false;
17075    }
17076  else if (which_group == previous_group)
17077    {
17078      if (is_dispatch_slot_restricted (insn))
17079	return true;
17080      return false;
17081    }
17082
17083  return false;
17084}
17085
17086/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
17087   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */
17088
17089static bool
17090is_costly_group (rtx *group_insns, rtx next_insn)
17091{
17092  int i;
17093  rtx link;
17094  int cost;
17095  int issue_rate = rs6000_issue_rate ();
17096
17097  for (i = 0; i < issue_rate; i++)
17098    {
17099      rtx insn = group_insns[i];
17100      if (!insn)
17101	continue;
17102      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
17103	{
17104	  rtx next = XEXP (link, 0);
17105	  if (next == next_insn)
17106	    {
17107	      cost = insn_cost (insn, link, next_insn);
17108	      if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
17109		return true;
17110	    }
17111	}
17112    }
17113
17114  return false;
17115}
17116
17117/* Utility of the function redefine_groups.
17118   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
17119   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
17120   to keep it "far" (in a separate group) from GROUP_INSNS, following
17121   one of the following schemes, depending on the value of the flag
17122   -minsert_sched_nops = X:
17123   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
17124       in order to force NEXT_INSN into a separate group.
17125   (2) X < sched_finish_regroup_exact: insert exactly X nops.
17126   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
17127   insertion (has a group just ended, how many vacant issue slots remain in the
17128   last group, and how many dispatch groups were encountered so far).  */
17129
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
		 rtx next_insn, bool *group_end, int can_issue_more,
		 int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  /* Nothing to separate NEXT_INSN from if there is no next insn.  */
  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact mean "insert no nops".  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  /* Only act when NEXT_INSN has a costly dependence on the group.  */
  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
	     *group_count ,can_issue_more);

  /* Scheme (1): insert exactly as many nops as needed to push
     NEXT_INSN into a new dispatch group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
	can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch
	 will be forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
	can_issue_more--;

      while (can_issue_more > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  can_issue_more--;
	}

      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert exactly rs6000_sched_insert_nops nops,
     tracking group boundaries as they fill.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
	 issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
	{
	  /* The current group just filled; start a fresh one.  */
	  can_issue_more = issue_rate - 1;
	  (*group_count)++;
	  end = true;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      while (n_nops > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  if (can_issue_more == issue_rate - 1) /* new group begins */
	    end = false;
	  can_issue_more--;
	  if (can_issue_more == 0)
	    {
	      can_issue_more = issue_rate - 1;
	      (*group_count)++;
	      end = true;
	      for (i = 0; i < issue_rate; i++)
		{
		  group_insns[i] = 0;
		}
	    }
	  n_nops--;
	}

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      /* Is next_insn going to start a new group?  */
      *group_end
	= (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
	(*group_count)--;

      if (sched_verbose > 6)
	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
		 *group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
17240
17241/* This function tries to synch the dispatch groups that the compiler "sees"
17242   with the dispatch groups that the processor dispatcher is expected to
17243   form in practice.  It tries to achieve this synchronization by forcing the
17244   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the processor).
17246
17247   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
17248   examines the (estimated) dispatch groups that will be formed by the processor
17249   dispatcher.  It marks these group boundaries to reflect the estimated
17250   processor grouping, overriding the grouping that the scheduler had marked.
17251   Depending on the value of the flag '-minsert-sched-nops' this function can
17252   force certain insns into separate groups or force a certain distance between
17253   them by inserting nops, for example, if there exists a "costly dependence"
17254   between the insns.
17255
17256   The function estimates the group boundaries that the processor will form as
17257   follows:  It keeps track of how many vacant issue slots are available after
17258   each insn.  A subsequent insn will start a new group if one of the following
17259   4 cases applies:
17260   - no more vacant issue slots remain in the current dispatch group.
17261   - only the last issue slot, which is the branch slot, is vacant, but the next
17262     insn is not a branch.
17263   - only the last 2 or less issue slots, including the branch slot, are vacant,
17264     which means that a cracked insn (which occupies two issue slots) can't be
17265     issued in this group.
17266   - less than 'issue_rate' slots are vacant, and the next insn always needs to
17267     start a new group.  */
17268
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in its estimated issue slot and account for the
	 slots it consumes.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      /* Is next_insn going to start a new group?  */
      group_end
	= (can_issue_more == 0
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));

      /* Possibly insert nops to separate NEXT_INSN from a costly
	 dependence on the current group.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      /* The scheduler marks group starts with TImode; rewrite those
	 marks to reflect the estimated processor grouping instead.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE (next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
17339
17340/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
17341   dispatch group boundaries that the scheduler had marked.  Pad with nops
17342   any dispatch groups which have vacant issue slots, in order to force the
17343   scheduler's grouping on the processor dispatcher.  The function
17344   returns the number of dispatch groups found.  */
17345
static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
      	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* The scheduler marks the first insn of each group with TImode.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      /* The last slot is the branch slot; a nop there is only
		 needed when the next insn is not itself a branch.  */
	      if (!is_branch_slot_insn (next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
17405
17406/* The following function is called at the end of scheduling BB.
17407   After reload, it inserts nops at insn group bundling.  */
17408
17409static void
17410rs6000_sched_finish (FILE *dump, int sched_verbose)
17411{
17412  int n_groups;
17413
17414  if (sched_verbose)
17415    fprintf (dump, "=== Finishing schedule.\n");
17416
17417  if (reload_completed && rs6000_sched_groups)
17418    {
17419      if (rs6000_sched_insert_nops == sched_finish_none)
17420	return;
17421
17422      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17423	n_groups = pad_groups (dump, sched_verbose,
17424			       current_sched_info->prev_head,
17425			       current_sched_info->next_tail);
17426      else
17427	n_groups = redefine_groups (dump, sched_verbose,
17428				    current_sched_info->prev_head,
17429				    current_sched_info->next_tail);
17430
17431      if (sched_verbose >= 6)
17432	{
17433    	  fprintf (dump, "ngroups = %d\n", n_groups);
17434	  print_rtl (dump, current_sched_info->prev_head);
17435	  fprintf (dump, "Done finish_sched\n");
17436	}
17437    }
17438}
17439
17440/* Length in units of the trampoline for entering a nested function.  */
17441
17442int
17443rs6000_trampoline_size (void)
17444{
17445  int ret = 0;
17446
17447  switch (DEFAULT_ABI)
17448    {
17449    default:
17450      gcc_unreachable ();
17451
17452    case ABI_AIX:
17453      ret = (TARGET_32BIT) ? 12 : 24;
17454      break;
17455
17456    case ABI_DARWIN:
17457    case ABI_V4:
17458      ret = (TARGET_32BIT) ? 40 : 48;
17459      break;
17460    }
17461
17462  return ret;
17463}
17464
17465/* Emit RTL insns to initialize the variable parts of a trampoline.
17466   FNADDR is an RTX for the address of the function's pure code.
17467   CXT is an RTX for the static chain value for the function.  */
17468
void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  /* Size of one pointer slot in the descriptor.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (Pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (Pmode);
	rtx toc_reg = gen_reg_rtx (Pmode);
	/* Copy the code address and TOC pointer out of FNADDR's
	   descriptor, then fill ADDR's descriptor with them plus the
	   static chain value.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, Pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, Pmode,
			 ctx_reg, Pmode);
      break;
    }

  return;
}
17512
17513
17514/* Table of valid machine attributes.  */
17515
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler }
     where min_len/max_len bound the argument count, the three bools say
     whether the attribute requires a decl / a type / a function type,
     and handler is the callback that validates and applies it.  */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* "shortcall" shares the longcall handler; they differ only in name.  */
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
  SUBTARGET_ATTRIBUTE_TABLE,
#endif
  /* Sentinel entry terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL }
};
17529
17530/* Handle the "altivec" attribute.  The attribute may have
17531   arguments as follows:
17532
17533	__attribute__((altivec(vector__)))
17534	__attribute__((altivec(pixel__)))	(always followed by 'unsigned short')
17535	__attribute__((altivec(bool__)))	(always followed by 'unsigned')
17536
17537  and may appear more than once (e.g., 'vector bool char') in a
17538  given declaration.  */
17539
static tree
rs6000_handle_altivec_attribute (tree *node,
				 tree name ATTRIBUTE_UNUSED,
				 tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the attribute argument selects the flavor:
     'v' for vector__, 'b' for bool__, 'p' for pixel__; '?' if the
     argument is missing or not an identifier.  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointers, function/method types and arrays to reach the
     underlying element type the attribute applies to.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  /* Check for invalid AltiVec type qualifiers.  */
  if (type == long_unsigned_type_node || type == long_integer_type_node)
    {
    if (TARGET_64BIT)
      error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
    else if (rs6000_warn_altivec_long)
      warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
    }
  else if (type == long_long_unsigned_type_node
           || type == long_long_integer_type_node)
    error ("use of %<long long%> in AltiVec types is invalid");
  else if (type == double_type_node)
    error ("use of %<double%> in AltiVec types is invalid");
  else if (type == long_double_type_node)
    error ("use of %<long double%> in AltiVec types is invalid");
  else if (type == boolean_type_node)
    error ("use of boolean types in AltiVec types is invalid");
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    error ("use of %<complex%> in AltiVec types is invalid");
  else if (DECIMAL_FLOAT_MODE_P (mode))
    error ("use of decimal floating point types in AltiVec types is invalid");

  /* Map the (flavor, element mode) pair to the corresponding built-in
     vector type.  Unhandled combinations leave RESULT as NULL_TREE and
     the attribute is silently dropped below.  */
  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TYPE_UNSIGNED (type);
      switch (mode)
	{
	case SImode:
	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	  break;
	case HImode:
	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	  break;
	case QImode:
	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	  break;
	case SFmode: result = V4SF_type_node; break;
	  /* If the user says 'vector int bool', we may be handed the 'bool'
	     attribute _before_ the 'vector' attribute, and so select the
	     proper type in the 'b' case below.  */
	case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
	  result = type;
	  /* FALLTHRU to default, which only breaks.  */
	default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	case QImode: case V16QImode: result = bool_V16QI_type_node;
	  /* FALLTHRU to default, which only breaks.  */
	default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	case V8HImode: result = pixel_V8HI_type_node;
	  /* FALLTHRU to default, which only breaks.  */
	default: break;
	}
      /* FALLTHRU into outer default, which only breaks.  */
    default: break;
    }

  /* Propagate qualifiers attached to the element type onto the vector.  */
  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
17638
17639/* AltiVec defines four built-in scalar types that serve as vector
17640   elements; we must teach the compiler how to mangle them.  */
17641
17642static const char *
17643rs6000_mangle_fundamental_type (tree type)
17644{
17645  if (type == bool_char_type_node) return "U6__boolc";
17646  if (type == bool_short_type_node) return "U6__bools";
17647  if (type == pixel_type_node) return "u7__pixel";
17648  if (type == bool_int_type_node) return "U6__booli";
17649
17650  /* Mangle IBM extended float long double as `g' (__float128) on
17651     powerpc*-linux where long-double-64 previously was the default.  */
17652  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
17653      && TARGET_ELF
17654      && TARGET_LONG_DOUBLE_128
17655      && !TARGET_IEEEQUAD)
17656    return "g";
17657
17658  /* For all other types, use normal C++ mangling.  */
17659  return NULL;
17660}
17661
17662/* Handle a "longcall" or "shortcall" attribute; arguments as in
17663   struct attribute_spec.handler.  */
17664
17665static tree
17666rs6000_handle_longcall_attribute (tree *node, tree name,
17667				  tree args ATTRIBUTE_UNUSED,
17668				  int flags ATTRIBUTE_UNUSED,
17669				  bool *no_add_attrs)
17670{
17671  if (TREE_CODE (*node) != FUNCTION_TYPE
17672      && TREE_CODE (*node) != FIELD_DECL
17673      && TREE_CODE (*node) != TYPE_DECL)
17674    {
17675      warning (OPT_Wattributes, "%qs attribute only applies to functions",
17676	       IDENTIFIER_POINTER (name));
17677      *no_add_attrs = true;
17678    }
17679
17680  return NULL_TREE;
17681}
17682
17683/* Set longcall attributes on all functions declared when
17684   rs6000_default_long_calls is true.  */
17685static void
17686rs6000_set_default_type_attributes (tree type)
17687{
17688  if (rs6000_default_long_calls
17689      && (TREE_CODE (type) == FUNCTION_TYPE
17690	  || TREE_CODE (type) == METHOD_TYPE))
17691    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17692					NULL_TREE,
17693					TYPE_ATTRIBUTES (type));
17694
17695#if TARGET_MACHO
17696  darwin_set_default_type_attributes (type);
17697#endif
17698}
17699
17700/* Return a reference suitable for calling a function with the
17701   longcall attribute.  */
17702
17703rtx
17704rs6000_longcall_ref (rtx call_ref)
17705{
17706  const char *call_name;
17707  tree node;
17708
17709  if (GET_CODE (call_ref) != SYMBOL_REF)
17710    return call_ref;
17711
17712  /* System V adds '.' to the internal name, so skip them.  */
17713  call_name = XSTR (call_ref, 0);
17714  if (*call_name == '.')
17715    {
17716      while (*call_name == '.')
17717	call_name++;
17718
17719      node = get_identifier (call_name);
17720      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17721    }
17722
17723  return force_reg (Pmode, call_ref);
17724}
17725
17726#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
17727#define TARGET_USE_MS_BITFIELD_LAYOUT 0
17728#endif
17729
17730/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
17731   struct attribute_spec.handler.  */
static tree
rs6000_handle_struct_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  /* TYPE points at the type the attribute really applies to: the decl's
     type for a TYPE_DECL, the node itself when NODE is already a type.  */
  tree *type = NULL;
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
        type = &TREE_TYPE (*node);
    }
  else
    type = node;

  /* Only struct and union types may carry ms_struct/gcc_struct.  */
  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
                 || TREE_CODE (*type) == UNION_TYPE)))
    {
      warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  /* The two attributes are mutually exclusive; ignore whichever one
     arrives second.  */
  else if ((is_attribute_p ("ms_struct", name)
            && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
           || ((is_attribute_p ("gcc_struct", name)
                && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
    {
      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
17765
17766static bool
17767rs6000_ms_bitfield_layout_p (tree record_type)
17768{
17769  return (TARGET_USE_MS_BITFIELD_LAYOUT &&
17770          !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
17771    || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
17772}
17773
17774#ifdef USING_ELFOS_H
17775
17776/* A get_unnamed_section callback, used for switching to toc_section.  */
17777
static void
rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  /* AIX-style minimal TOC: emit the TOC anchor once, then keep
     switching to the minimal-TOC section on later calls.  */
  if (DEFAULT_ABI == ABI_AIX
      && TARGET_MINIMAL_TOC
      && !TARGET_RELOCATABLE)
    {
      if (!toc_initialized)
	{
	  toc_initialized = 1;
	  fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
	  (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
	  fprintf (asm_out_file, "\t.tc ");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, "\n");

	  fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  /* Bias the anchor by 32768 so 16-bit signed offsets span the
	     whole 64K TOC.  */
	  fprintf (asm_out_file, " = .+32768\n");
	}
      else
	fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
    }
  /* Full AIX TOC: just switch to the .toc section.  */
  else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
    fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
  /* Everything else (V.4, -mrelocatable): minimal TOC with a one-time
     biased anchor definition.  */
  else
    {
      fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      if (!toc_initialized)
	{
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, " = .+32768\n");
	  toc_initialized = 1;
	}
    }
}
17815
17816/* Implement TARGET_ASM_INIT_SECTIONS.  */
17817
static void
rs6000_elf_asm_init_sections (void)
{
  /* The TOC section needs custom switch-to logic (see
     rs6000_elf_output_toc_section_asm_op above).  */
  toc_section
    = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);

  /* .sdata2 is a plain writable section switched to with its own op.  */
  sdata2_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   SDATA2_SECTION_ASM_OP);
}
17828
17829/* Implement TARGET_SELECT_RTX_SECTION.  */
17830
17831static section *
17832rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17833			       unsigned HOST_WIDE_INT align)
17834{
17835  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17836    return toc_section;
17837  else
17838    return default_elf_select_rtx_section (mode, x, align);
17839}
17840
17841/* For a SYMBOL_REF, set generic flags and then perform some
17842   target-specific processing.
17843
17844   When the AIX ABI is requested on a non-AIX system, replace the
17845   function name with the real name (with a leading .) rather than the
17846   function descriptor name.  This saves a lot of overriding code to
17847   read the prefixes.  */
17848
17849static void
17850rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17851{
17852  default_encode_section_info (decl, rtl, first);
17853
17854  if (first
17855      && TREE_CODE (decl) == FUNCTION_DECL
17856      && !TARGET_AIX
17857      && DEFAULT_ABI == ABI_AIX)
17858    {
17859      rtx sym_ref = XEXP (rtl, 0);
17860      size_t len = strlen (XSTR (sym_ref, 0));
17861      char *str = alloca (len + 2);
17862      str[0] = '.';
17863      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17864      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17865    }
17866}
17867
17868bool
17869rs6000_elf_in_small_data_p (tree decl)
17870{
17871  if (rs6000_sdata == SDATA_NONE)
17872    return false;
17873
17874  /* We want to merge strings, so we never consider them small data.  */
17875  if (TREE_CODE (decl) == STRING_CST)
17876    return false;
17877
17878  /* Functions are never in the small data area.  */
17879  if (TREE_CODE (decl) == FUNCTION_DECL)
17880    return false;
17881
17882  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17883    {
17884      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17885      if (strcmp (section, ".sdata") == 0
17886	  || strcmp (section, ".sdata2") == 0
17887	  || strcmp (section, ".sbss") == 0
17888	  || strcmp (section, ".sbss2") == 0
17889	  || strcmp (section, ".PPC.EMB.sdata0") == 0
17890	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
17891	return true;
17892    }
17893  else
17894    {
17895      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17896
17897      if (size > 0
17898	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
17899	  /* If it's not public, and we're not going to reference it there,
17900	     there's no need to put it in the small data section.  */
17901	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17902	return true;
17903    }
17904
17905  return false;
17906}
17907
17908#endif /* USING_ELFOS_H */
17909
17910/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  */
17911
static bool
rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
{
  /* Constants destined for the TOC are handled specially and must not
     be placed in object blocks.  */
  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
}
17917
17918/* Return a REG that occurs in ADDR with coefficient 1.
17919   ADDR can be effectively incremented by incrementing REG.
17920
17921   r0 is special and we must not select it as an address
17922   register by this routine since our caller will try to
17923   increment the returned register via an "la" instruction.  */
17924
17925rtx
17926find_addr_reg (rtx addr)
17927{
17928  while (GET_CODE (addr) == PLUS)
17929    {
17930      if (GET_CODE (XEXP (addr, 0)) == REG
17931	  && REGNO (XEXP (addr, 0)) != 0)
17932	addr = XEXP (addr, 0);
17933      else if (GET_CODE (XEXP (addr, 1)) == REG
17934	       && REGNO (XEXP (addr, 1)) != 0)
17935	addr = XEXP (addr, 1);
17936      else if (CONSTANT_P (XEXP (addr, 0)))
17937	addr = XEXP (addr, 1);
17938      else if (CONSTANT_P (XEXP (addr, 1)))
17939	addr = XEXP (addr, 0);
17940      else
17941	gcc_unreachable ();
17942    }
17943  gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17944  return addr;
17945}
17946
/* Report an invalid address RTX as an internal compiler error,
   quoting the offending insn OP.  Does not return.  */
void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
17952
17953#if TARGET_MACHO
17954
/* TREE_LIST chain of pending branch islands, one node per far-called
   function (see the BRANCH_ISLAND_* accessors below).  Emptied by
   macho_branch_islands after the epilogue is emitted.  */
static tree branch_island_list = 0;
17956
17957/* Remember to generate a branch island for far calls to the given
17958   function.  */
17959
static void
add_compiler_branch_island (tree label_name, tree function_name,
			    int line_number)
{
  /* Pack (function, label) into a TREE_LIST node and stash the source
     line number in the node's TREE_TYPE as an integer constant.  */
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
  /* Push onto the front of the pending-islands list.  */
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
17969
17970#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
17971#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
17972#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
17973		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17974
17975/* Generate far-jump branch islands for everything on the
17976   branch_island_list.  Invoked immediately after the last instruction
17977   of the epilogue has been emitted; the branch-islands must be
17978   appended to, and contiguous with, the function body.  Mach-O stubs
17979   are generated in machopic_output_stub().  */
17980
static void
macho_branch_islands (void)
{
  /* Assembly text for one island is assembled into this buffer.  */
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      /* Start the island with its label.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: establish the island's own address in r11 via
	     bcl/mflr, then compute the target PC-relatively.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  /* Restore the link register clobbered by the bcl above.  */
	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute address into r12 and jump.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  /* All pending islands have been emitted.  */
  branch_island_list = 0;
}
18051
18052/* NO_PREVIOUS_DEF checks in the link list whether the function name is
18053   already there or not.  */
18054
18055static int
18056no_previous_def (tree function_name)
18057{
18058  tree branch_island;
18059  for (branch_island = branch_island_list;
18060       branch_island;
18061       branch_island = TREE_CHAIN (branch_island))
18062    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18063      return 0;
18064  return 1;
18065}
18066
18067/* GET_PREV_LABEL gets the label name from the previous definition of
18068   the function.  */
18069
18070static tree
18071get_prev_label (tree function_name)
18072{
18073  tree branch_island;
18074  for (branch_island = branch_island_list;
18075       branch_island;
18076       branch_island = TREE_CHAIN (branch_island))
18077    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18078      return BRANCH_ISLAND_LABEL_NAME (branch_island);
18079  return 0;
18080}
18081
18082#ifndef DARWIN_LINKER_GENERATES_ISLANDS
18083#define DARWIN_LINKER_GENERATES_ISLANDS 0
18084#endif
18085
18086/* KEXTs still need branch islands.  */
18087#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
18088				 || flag_mkernel || flag_apple_kext)
18089
18090/* INSN is either a function call or a millicode call.  It may have an
18091   unconditional jump in its delay slot.
18092
18093   CALL_DEST is the routine we are calling.  */
18094
char *
output_call (rtx insn, rtx *operands, int dest_operand_number,
	     int cookie_operand_number)
{
  /* Returned template text; static so the caller may use it after return.  */
  static char buf[256];
  /* Long direct calls to known symbols get a "jbsr" with a branch-island
     label the linker can fall back to.  */
  if (DARWIN_GENERATE_ISLANDS
      && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  /* First far call to this function: invent an island label and
	     record it, noting the nearest preceding source line.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Strip the '*' marker some label prefixes carry.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the closest NOTE to recover a line number.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
18136
18137/* Generate PIC and indirect symbol stubs.  */
18138
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Counter used to make each PIC stub's local label unique.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);


  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  if (flag_pic == 2)
    switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
  else
    switch_to_section (darwin_sections[machopic_symbol_stub1_section]);

  if (flag_pic == 2)
    {
      /* PIC stub: find its own address with bcl/mflr, then load the
	 lazy pointer PC-relatively and jump through it.  */
      fprintf (file, "\t.align 5\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      label++;
      local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      /* Restore LR, clobbered by the bcl above.  */
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
	       (TARGET_64BIT ? "ldu" : "lwzu"),
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: load the lazy pointer's absolute address.  */
      fprintf (file, "\t.align 4\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
	       (TARGET_64BIT ? "ldu" : "lwzu"),
	       lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* Emit the lazy pointer itself, initially pointing at the dyld
     binding helper so the first call resolves the symbol.  */
  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "%sdyld_stub_binding_helper\n",
	   (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
}
18207
18208/* Legitimize PIC addresses.  If the address is already
18209   position-independent, we return ORIG.  Newly generated
18210   position-independent addresses go into a reg.  This is REG if non
18211   zero, otherwise we allocate register(s) as necessary.  */
18212
18213#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
18214
rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  /* Allocate a scratch register unless reload forbids new pseudos.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      rtx reg_temp;

      /* Already a PIC-base-relative sum; nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      /* Use a different reg for the intermediate value, as
	 it will be marked UNCHANGING.  */
      reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
      /* Legitimize the two halves of the sum independently.  */
      base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						     Pmode, reg_temp);
      offset =
	rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						Pmode, reg);

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* A 16-bit signed offset folds directly into the address.  */
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot make new pseudos, so fall back
		 to a constant-pool load of the whole expression.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx_PLUS (Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
18261
18262/* Output a .machine directive for the Darwin assembler, and call
18263   the generic start_file routine.  */
18264
static void
rs6000_darwin_file_start (void)
{
  /* Map -mcpu= arguments (or target-flag bits) to assembler .machine
     names.  Scanned in order; the final NULL entry is the default.  */
  static const struct
  {
    const char *arg;	/* -mcpu= argument to match, NULL = default.  */
    const char *name;	/* .machine name to emit.  */
    int if_set;		/* Alternatively match these target_flags bits.  */
  } mapping[] = {
    { "ppc64", "ppc64", MASK_64BIT },
    { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
    { "power4", "ppc970", 0 },
    { "G5", "ppc970", 0 },
    { "7450", "ppc7450", 0 },
    { "7400", "ppc7400", MASK_ALTIVEC },
    { "G4", "ppc7400", 0 },
    { "750", "ppc750", 0 },
    { "740", "ppc750", 0 },
    { "G3", "ppc750", 0 },
    { "604e", "ppc604e", 0 },
    { "604", "ppc604", 0 },
    { "603e", "ppc603", 0 },
    { "603", "ppc603", 0 },
    { "601", "ppc601", 0 },
    { NULL, "ppc", 0 } };
  const char *cpu_id = "";
  size_t i;

  rs6000_file_start ();
  darwin_file_start ();

  /* Determine the argument to -mcpu=.  Default to G3 if not specified.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    if (rs6000_select[i].set_arch_p && rs6000_select[i].string
	&& rs6000_select[i].string[0] != '\0')
      cpu_id = rs6000_select[i].string;

  /* Look through the mapping array.  Pick the first name that either
     matches the argument, has a bit set in IF_SET that is also set
     in the target flags, or has a NULL name.  */

  i = 0;
  while (mapping[i].arg != NULL
	 && strcmp (mapping[i].arg, cpu_id) != 0
	 && (mapping[i].if_set & target_flags) == 0)
    i++;

  fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
}
18314
18315#endif /* TARGET_MACHO */
18316
18317#if TARGET_ELF
18318static int
18319rs6000_elf_reloc_rw_mask (void)
18320{
18321  if (flag_pic)
18322    return 3;
18323  else if (DEFAULT_ABI == ABI_AIX)
18324    return 2;
18325  else
18326    return 0;
18327}
18328
18329/* Record an element in the table of global constructors.  SYMBOL is
18330   a SYMBOL_REF of the function to be called; PRIORITY is a number
18331   between 0 and MAX_INIT_PRIORITY.
18332
18333   This differs from default_named_section_asm_out_constructor in
18334   that we have special handling for -mrelocatable.  */
18335
static void
rs6000_elf_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* Non-default priorities use a numbered .ctors.NNNNN section so the
     linker script can order them.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable needs an @fixup marker so the entry is adjusted
	 at load time.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
18364
/* Record an element in the table of global destructors; mirror image
   of rs6000_elf_asm_out_constructor above, but for .dtors.  */
static void
rs6000_elf_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  /* Non-default priorities use a numbered .dtors.NNNNN section.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable needs an @fixup marker for load-time adjustment.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
18393
/* Emit the assembly that declares function NAME with decl DECL to FILE:
   the .opd function descriptor for 64-bit, the -mrelocatable TOC fixup
   word, the .type directive, and the AIX-ABI descriptor for 32-bit.  */
void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  if (TARGET_64BIT)
    {
      /* 64-bit ELF: emit the 3-word function descriptor in .opd
	 (entry point, TOC base, static chain slot).  */
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      rs6000_output_function_entry (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
      if (DOT_SYMBOLS)
	{
	  /* Dot-symbol convention: ".name" is the code entry point.  */
	  fputs ("\t.size\t", file);
	  assemble_name (file, name);
	  fputs (",24\n\t.type\t.", file);
	  assemble_name (file, name);
	  fputs (",@function\n", file);
	  if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	    {
	      fputs ("\t.globl\t.", file);
	      assemble_name (file, name);
	      putc ('\n', file);
	    }
	}
      else
	ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      rs6000_output_function_entry (file, name);
      fputs (":\n", file);
      return;
    }

  /* -mrelocatable without secure PLT: emit a fixup word recording the
     distance from the TOC anchor to this function's GOT label.  */
  if (TARGET_RELOCATABLE
      && !TARGET_SECURE_PLT
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      /* 32-bit AIX-ABI-on-ELF: emit a function descriptor whose name is
	 the symbol without leading dots.  */
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test is always true inside the
	 enclosing DEFAULT_ABI == ABI_AIX block, so the zero word is
	 emitted unconditionally here.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
18469
18470static void
18471rs6000_elf_end_indicate_exec_stack (void)
18472{
18473  if (NEED_INDICATE_EXEC_STACK)
18474    file_end_indicate_exec_stack ();
18475}
18476#endif
18477
18478#if TARGET_XCOFF
static void
rs6000_xcoff_asm_output_anchor (rtx symbol)
{
  char buffer[100];

  /* Define the section-anchor SYMBOL as the current location ("$") plus
     the anchor's recorded offset inside its object block.  */
  sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
18488
18489static void
18490rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18491{
18492  fputs (GLOBAL_ASM_OP, stream);
18493  RS6000_OUTPUT_BASENAME (stream, name);
18494  putc ('\n', stream);
18495}
18496
18497/* A get_unnamed_decl callback, used for read-only sections.  PTR
18498   points to the section string variable.  */
18499
18500static void
18501rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
18502{
18503  fprintf (asm_out_file, "\t.csect %s[RO],3\n",
18504	   *(const char *const *) directive);
18505}
18506
18507/* Likewise for read-write sections.  */
18508
18509static void
18510rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
18511{
18512  fprintf (asm_out_file, "\t.csect %s[RW],3\n",
18513	   *(const char *const *) directive);
18514}
18515
18516/* A get_unnamed_section callback, used for switching to toc_section.  */
18517
static void
rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (TARGET_MINIMAL_TOC)
    {
      /* toc_section is always selected at least once from
	 rs6000_xcoff_file_start, so this is guaranteed to
	 always be defined once and only once in each file.  */
      if (!toc_initialized)
	{
	  fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
	  fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
	  toc_initialized = 1;
	}
      /* 64-bit csects carry an explicit alignment operand (",3" means
	 2^3-byte alignment).  */
      fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
	       (TARGET_32BIT ? "" : ",3"));
    }
  else
    fputs ("\t.toc\n", asm_out_file);
}
18538
18539/* Implement TARGET_ASM_INIT_SECTIONS.  */
18540
static void
rs6000_xcoff_asm_init_sections (void)
{
  /* Build the XCOFF-specific unnamed sections and point the generic
     section variables used by the middle end at them.  */
  read_only_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
			   &xcoff_read_only_section_name);

  private_data_section
    = get_unnamed_section (SECTION_WRITE,
			   rs6000_xcoff_output_readwrite_section_asm_op,
			   &xcoff_private_data_section_name);

  /* Read-only private data deliberately shares the private-data csect
     name; only the [RO] storage class differs.  */
  read_only_private_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
			   &xcoff_private_data_section_name);

  toc_section
    = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);

  readonly_data_section = read_only_data_section;
  exception_section = data_section;
}
18563
static int
rs6000_xcoff_reloc_rw_mask (void)
{
  /* On XCOFF, sections containing relocations must be writable whether
     nominally read-only or read-write, so both mask bits are set.  */
  return 0x3;
}
18569
18570static void
18571rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18572				tree decl ATTRIBUTE_UNUSED)
18573{
18574  int smclass;
18575  static const char * const suffix[3] = { "PR", "RO", "RW" };
18576
18577  if (flags & SECTION_CODE)
18578    smclass = 0;
18579  else if (flags & SECTION_WRITE)
18580    smclass = 2;
18581  else
18582    smclass = 1;
18583
18584  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18585	   (flags & SECTION_CODE) ? "." : "",
18586	   name, suffix[smclass], flags & SECTION_ENTSIZE);
18587}
18588
18589static section *
18590rs6000_xcoff_select_section (tree decl, int reloc,
18591			     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18592{
18593  if (decl_readonly_section (decl, reloc))
18594    {
18595      if (TREE_PUBLIC (decl))
18596	return read_only_data_section;
18597      else
18598	return read_only_private_data_section;
18599    }
18600  else
18601    {
18602      if (TREE_PUBLIC (decl))
18603	return data_section;
18604      else
18605	return private_data_section;
18606    }
18607}
18608
18609static void
18610rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
18611{
18612  const char *name;
18613
18614  /* Use select_section for private and uninitialized data.  */
18615  if (!TREE_PUBLIC (decl)
18616      || DECL_COMMON (decl)
18617      || DECL_INITIAL (decl) == NULL_TREE
18618      || DECL_INITIAL (decl) == error_mark_node
18619      || (flag_zero_initialized_in_bss
18620	  && initializer_zerop (DECL_INITIAL (decl))))
18621    return;
18622
18623  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18624  name = (*targetm.strip_name_encoding) (name);
18625  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
18626}
18627
18628/* Select section for constant in constant pool.
18629
18630   On RS/6000, all constants are in the private read-only data area.
18631   However, if this is being placed in the TOC it must be output as a
18632   toc entry.  */
18633
18634static section *
18635rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18636				 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18637{
18638  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18639    return toc_section;
18640  else
18641    return read_only_private_data_section;
18642}
18643
18644/* Remove any trailing [DS] or the like from the symbol name.  */
18645
static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  /* Strip a leading '*' and any trailing four-character mapping-class
     suffix such as "[DS]" or "[RW]" from NAME, returning either NAME
     itself or a fresh GC-allocated copy.  */
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* The suffix is exactly four characters ("[XX]"); guard the length so
     a short name can neither read name[-1] nor pass a wrapped-around
     size_t (len - 4) to ggc_alloc_string.  */
  if (len > 3 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);

  return name;
}
18658
18659/* Section attributes.  AIX is always PIC.  */
18660
static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): this path dereferences DECL unconditionally --
       assumes non-code sections are never requested with a null decl;
       confirm against callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* The log2 of the alignment is carried in the SECTION_ENTSIZE bits of
     the returned flags (see rs6000_xcoff_asm_named_section).  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
18678
18679/* Output at beginning of assembler file.
18680
18681   Initialize the section names for the RS/6000 at this point.
18682
18683   Specify filename, including full path, to assembler.
18684
18685   We want to go into the TOC section so at least one .toc will be emitted.
18686   Also, in order to output proper .bs/.es pairs, we need at least one static
18687   [RW] section emitted.
18688
18689   Finally, declare mcount when profiling to make the assembler happy.  */
18690
static void
rs6000_xcoff_file_start (void)
{
  /* Derive the per-file csect names from the main input file name.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch a static [RW] section when debugging so proper .bs/.es pairs
     can be emitted later (see the block comment above).  */
  if (write_symbols != NO_DEBUG)
    switch_to_section (private_data_section);
  switch_to_section (text_section);
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
18711
18712/* Output at end of assembler file.
18713   On the RS/6000, referencing data should automatically pull in text.  */
18714
18715static void
18716rs6000_xcoff_file_end (void)
18717{
18718  switch_to_section (text_section);
18719  fputs ("_section_.text:\n", asm_out_file);
18720  switch_to_section (data_section);
18721  fputs (TARGET_32BIT
18722	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18723	 asm_out_file);
18724}
18725#endif /* TARGET_XCOFF */
18726
18727/* Compute a (partial) cost for rtx X.  Return true if the complete
18728   cost has been computed, and false if subexpressions should be
18729   scanned.  In either case, *TOTAL contains the cost result.  */
18730
static bool
rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  /* Target hook for rtx costs.  X is the expression, CODE its rtx code,
     OUTER_CODE the code of the containing expression, and *TOTAL
     receives the cost.  Returns true when *TOTAL is final, false when
     the caller should also cost the subexpressions.  */
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.  */
    case CONST_INT:
      if (((outer_code == SET
	    || outer_code == PLUS
	    || outer_code == MINUS)
	   && (satisfies_constraint_I (x)
	       || satisfies_constraint_L (x)))
	  || (outer_code == AND
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))
		  || mask_operand (x, mode)
		  || (mode == DImode
		      && mask64_operand (x, DImode))))
	  || ((outer_code == IOR || outer_code == XOR)
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || outer_code == ASHIFT
	  || outer_code == ASHIFTRT
	  || outer_code == LSHIFTRT
	  || outer_code == ROTATE
	  || outer_code == ROTATERT
	  || outer_code == ZERO_EXTRACT
	  || (outer_code == MULT
	      && satisfies_constraint_I (x))
	  || ((outer_code == DIV || outer_code == UDIV
	       || outer_code == MOD || outer_code == UMOD)
	      && exact_log2 (INTVAL (x)) >= 0)
	  || (outer_code == COMPARE
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)))
	  || (outer_code == EQ
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || (outer_code == GTU
	      && satisfies_constraint_I (x))
	  || (outer_code == LTU
	      && satisfies_constraint_P (x)))
	{
	  *total = 0;
	  return true;
	}
      /* Constants that need one extra instruction to materialize or
	 combine with the outer operation.  */
      else if ((outer_code == PLUS
		&& reg_or_add_cint_operand (x, VOIDmode))
	       || (outer_code == MINUS
		   && reg_or_sub_cint_operand (x, VOIDmode))
	       || ((outer_code == SET
		    || outer_code == IOR
		    || outer_code == XOR)
		   && (INTVAL (x)
		       & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (mode == DImode && code == CONST_DOUBLE)
	{
	  if ((outer_code == IOR || outer_code == XOR)
	      && CONST_DOUBLE_HIGH (x) == 0
	      && (CONST_DOUBLE_LOW (x)
		  & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
	    {
	      *total = 0;
	      return true;
	    }
	  else if ((outer_code == AND && and64_2_operand (x, DImode))
		   || ((outer_code == SET
			|| outer_code == IOR
			|| outer_code == XOR)
		       && CONST_DOUBLE_HIGH (x) == 0))
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	}
      /* FALLTHRU */

    case CONST:
    case HIGH:
    case SYMBOL_REF:
    case MEM:
      /* When optimizing for size, MEM should be slightly more expensive
	 than generating address, e.g., (plus (reg) (const)).
	 L1 cache latency is about two instructions.  */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = rs6000_cost->dmul - rs6000_cost->fp;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = 0;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      /* Integer multiply by a small immediate is cheaper (mulli).  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && satisfies_constraint_I (XEXP (x, 1)))
	{
	  if (INTVAL (XEXP (x, 1)) >= -256
	      && INTVAL (XEXP (x, 1)) <= 255)
	    *total = rs6000_cost->mulsi_const9;
	  else
	    *total = rs6000_cost->mulsi_const;
	}
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
	       && (outer_code == PLUS || outer_code == MINUS))
	*total = 0;
      else if (mode == DFmode)
	*total = rs6000_cost->dmul;
      else if (mode == SFmode)
	*total = rs6000_cost->fp;
      else if (mode == DImode)
	*total = rs6000_cost->muldi;
      else
	*total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
	{
	  *total = mode == DFmode ? rs6000_cost->ddiv
				  : rs6000_cost->sdiv;
	  return false;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Division by a power of two reduces to shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  if (code == DIV || code == MOD)
	    /* Shift, addze */
	    *total = COSTS_N_INSNS (2);
	  else
	    /* Shift */
	    *total = COSTS_N_INSNS (1);
	}
      else
	{
	  if (GET_MODE (XEXP (x, 1)) == DImode)
	    *total = rs6000_cost->divdi;
	  else
	    *total = rs6000_cost->divsi;
	}
      /* Add in shift and subtract for MOD. */
      if (code == MOD || code == UMOD)
	*total += COSTS_N_INSNS (2);
      return false;

    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case NOT:
      /* NOT folds into the logical instruction (nand/nor/eqv).  */
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
	{
	  *total = 0;
	  return false;
	}
      /* FALLTHRU */

    case AND:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
	  && GET_CODE (XEXP (x, 0)) == MULT)
	{
	  if (mode == DImode)
	    *total = rs6000_cost->muldi;
	  else
	    *total = rs6000_cost->mulsi;
	  return true;
	}
      else if (outer_code == AND)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* Extending loads are free (lha/lbz/lwz etc. do the extension).  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
	*total = 0;
      else
	*total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
	{
	case UNSPEC_FRSP:
	  *total = rs6000_cost->fp;
	  return true;

	default:
	  break;
	}
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      else if (FLOAT_MODE_P (mode)
	       && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
	{
	  /* fsel is available for floating-point conditionals.  */
	  *total = rs6000_cost->fp;
	  return false;
	}
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
	 NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
	  && (outer_code == NEG || outer_code == PLUS))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      if (outer_code == SET)
	{
	  if (XEXP (x, 1) == const0_rtx)
	    {
	      *total = COSTS_N_INSNS (2);
	      return true;
	    }
	  else if (mode == Pmode)
	    {
	      *total = COSTS_N_INSNS (3);
	      return false;
	    }
	}
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
	{
	  *total = 0;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
19096
19097/* A C expression returning the cost of moving data from a register of class
19098   CLASS1 to one of CLASS2.  */
19099
int
rs6000_register_move_cost (enum machine_mode mode,
			   enum reg_class from, enum reg_class to)
{
  /*  Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      /* Normalize so FROM names the non-GPR side, if there is one.  */
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      /* FPR/AltiVec <-> GPR moves have to go through memory.  */
      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
	 shift.  */
      else if (from == CR_REGS)
	return 4;

      else
	/* A move will cost one instruction per GPR moved.  */
	return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
19134
19135/* A C expressions returning the cost of moving data of MODE from a register to
19136   or from memory.  */
19137
19138int
19139rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
19140			 int in ATTRIBUTE_UNUSED)
19141{
19142  if (reg_classes_intersect_p (class, GENERAL_REGS))
19143    return 4 * hard_regno_nregs[0][mode];
19144  else if (reg_classes_intersect_p (class, FLOAT_REGS))
19145    return 4 * hard_regno_nregs[32][mode];
19146  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
19147    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
19148  else
19149    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
19150}
19151
19152/* Newton-Raphson approximation of single-precision floating point divide n/d.
19153   Assumes no trapping math and finite arguments.  */
19154
void
rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
{
  /* Emit RTL computing RES = N / D in SFmode from a hardware reciprocal
     estimate refined by Newton-Raphson (one refinement step plus a
     final correction).  All pseudos below are SFmode temporaries.  */
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
			  gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
					  UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
			  gen_rtx_MINUS (SFmode, one,
					 gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
			  gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
			  gen_rtx_MINUS (SFmode, n,
					 gen_rtx_MULT (SFmode, d, u0))));
  /* res = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, v0, y1), u0)));
}
19196
19197/* Newton-Raphson approximation of double-precision floating point divide n/d.
19198   Assumes no trapping math and finite arguments.  */
19199
19200void
19201rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
19202{
19203  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
19204
19205  x0 = gen_reg_rtx (DFmode);
19206  e0 = gen_reg_rtx (DFmode);
19207  e1 = gen_reg_rtx (DFmode);
19208  e2 = gen_reg_rtx (DFmode);
19209  y1 = gen_reg_rtx (DFmode);
19210  y2 = gen_reg_rtx (DFmode);
19211  y3 = gen_reg_rtx (DFmode);
19212  u0 = gen_reg_rtx (DFmode);
19213  v0 = gen_reg_rtx (DFmode);
19214  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
19215
19216  /* x0 = 1./d estimate */
19217  emit_insn (gen_rtx_SET (VOIDmode, x0,
19218			  gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
19219					  UNSPEC_FRES)));
19220  /* e0 = 1. - d * x0 */
19221  emit_insn (gen_rtx_SET (VOIDmode, e0,
19222			  gen_rtx_MINUS (DFmode, one,
19223					 gen_rtx_MULT (SFmode, d, x0))));
19224  /* y1 = x0 + e0 * x0 */
19225  emit_insn (gen_rtx_SET (VOIDmode, y1,
19226			  gen_rtx_PLUS (DFmode,
19227					gen_rtx_MULT (DFmode, e0, x0), x0)));
19228  /* e1 = e0 * e0 */
19229  emit_insn (gen_rtx_SET (VOIDmode, e1,
19230			  gen_rtx_MULT (DFmode, e0, e0)));
19231  /* y2 = y1 + e1 * y1 */
19232  emit_insn (gen_rtx_SET (VOIDmode, y2,
19233			  gen_rtx_PLUS (DFmode,
19234					gen_rtx_MULT (DFmode, e1, y1), y1)));
19235  /* e2 = e1 * e1 */
19236  emit_insn (gen_rtx_SET (VOIDmode, e2,
19237			  gen_rtx_MULT (DFmode, e1, e1)));
19238  /* y3 = y2 + e2 * y2 */
19239  emit_insn (gen_rtx_SET (VOIDmode, y3,
19240			  gen_rtx_PLUS (DFmode,
19241					gen_rtx_MULT (DFmode, e2, y2), y2)));
19242  /* u0 = n * y3 */
19243  emit_insn (gen_rtx_SET (VOIDmode, u0,
19244			  gen_rtx_MULT (DFmode, n, y3)));
19245  /* v0 = n - d * u0 */
19246  emit_insn (gen_rtx_SET (VOIDmode, v0,
19247			  gen_rtx_MINUS (DFmode, n,
19248					 gen_rtx_MULT (DFmode, d, u0))));
19249  /* res = u0 + v0 * y3 */
19250  emit_insn (gen_rtx_SET (VOIDmode, res,
19251			  gen_rtx_PLUS (DFmode,
19252					gen_rtx_MULT (DFmode, v0, y3), u0)));
19253}
19254
19255/* Return an RTX representing where to find the function value of a
19256   function returning MODE.  */
static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
	return gen_rtx_REG (mode, regno);
    }

  /* Doubleword-sized parts occupy consecutive registers naturally, so a
     single REG of the full complex mode suffices.  */
  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  /* Otherwise describe real and imaginary parts separately: the
     imaginary part lives at byte offset INNER_BYTES in REGNO + 1.  */
  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
			  const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
			  GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
19285
19286/* Define how to find the value returned by a function.
19287   VALTYPE is the data type of the value (as a tree).
19288   If the precise function being called is known, FUNC is its FUNCTION_DECL;
19289   otherwise, FUNC is 0.
19290
19291   On the SPE, both FPs and vectors are returned in r3.
19292
19293   On RS/6000 an integer value is in r3 and a floating-point value is in
19294   fp1, unless -msoft-float.  */
19295
rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
	 an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
	return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      /* Likewise, a complex long long spans four word-sized GPRs.  */
      return gen_rtx_PARALLEL (DCmode,
	gen_rtvec (4,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 2),
				      GEN_INT (8)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 3),
				      GEN_INT (12))));
    }

  /* Sub-word integers and pointers are promoted to full word width.  */
  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
	   && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
	   && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
19380
19381/* Define how to find the value returned by a library function
19382   assuming the value has mode MODE.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  /* Like rs6000_function_value, but for library calls where only the
     mode (no type) is known.  */
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* Long long return value need be split in -mpowerpc64, 32bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_MODE_P (mode)
	   && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
19420
19421/* Define the offset between two registers, FROM to be eliminated and its
19422   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  /* Return the offset to add when replacing register FROM with register
     TO at function entry, derived from the computed stack layout.  */
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    /* The hard FP coincides with the incoming SP unless a frame was
       pushed.  */
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
	offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
	     ? info->fixed_size + info->vars_size + info->parm_size
	     : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    /* Any other pair is not an eliminable combination.  */
    gcc_unreachable ();

  return offset;
}
19452
19453/* Return true if TYPE is a SPE or AltiVec opaque type.  */
19454
19455static bool
19456rs6000_is_opaque_type (tree type)
19457{
19458  return (type == opaque_V2SI_type_node
19459	      || type == opaque_V2SF_type_node
19460	      || type == opaque_p_V2SI_type_node
19461	      || type == opaque_V4SI_type_node);
19462}
19463
19464static rtx
19465rs6000_dwarf_register_span (rtx reg)
19466{
19467  unsigned regno;
19468
19469  if (TARGET_SPE
19470      && (SPE_VECTOR_MODE (GET_MODE (reg))
19471	  || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
19472    ;
19473  else
19474    return NULL_RTX;
19475
19476  regno = REGNO (reg);
19477
19478  /* The duality of the SPE register size wreaks all kinds of havoc.
19479     This is a way of distinguishing r0 in 32-bits from r0 in
19480     64-bits.  */
19481  return
19482    gen_rtx_PARALLEL (VOIDmode,
19483		      BYTES_BIG_ENDIAN
19484		      ? gen_rtvec (2,
19485				   gen_rtx_REG (SImode, regno + 1200),
19486				   gen_rtx_REG (SImode, regno))
19487		      : gen_rtvec (2,
19488				   gen_rtx_REG (SImode, regno),
19489				   gen_rtx_REG (SImode, regno + 1200)));
19490}
19491
/* Map internal gcc register numbers to DWARF2 register numbers.
   For GPRs/FPRs (regno <= 63) and for non-DWARF2 debug formats the
   internal number is used unchanged.  NOTE(review): the specific
   numbers below presumably follow the PowerPC DWARF register-numbering
   convention — confirm against the ABI before changing any of them.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  /* Condition registers map to a contiguous block starting at 86.  */
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  /* AltiVec registers map to a contiguous block starting at 1124.  */
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}
19524
19525/* target hook eh_return_filter_mode */
19526static enum machine_mode
19527rs6000_eh_return_filter_mode (void)
19528{
19529  return TARGET_32BIT ? SImode : word_mode;
19530}
19531
19532/* Target hook for scalar_mode_supported_p.  */
19533static bool
19534rs6000_scalar_mode_supported_p (enum machine_mode mode)
19535{
19536  if (DECIMAL_FLOAT_MODE_P (mode))
19537    return true;
19538  else
19539    return default_scalar_mode_supported_p (mode);
19540}
19541
19542/* Target hook for vector_mode_supported_p.  */
19543static bool
19544rs6000_vector_mode_supported_p (enum machine_mode mode)
19545{
19546
19547  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19548    return true;
19549
19550  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19551    return true;
19552
19553  else
19554    return false;
19555}
19556
19557/* Target hook for invalid_arg_for_unprototyped_fn. */
19558static const char *
19559invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19560{
19561  return (!rs6000_darwin64_abi
19562	  && typelist == 0
19563          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19564          && (funcdecl == NULL_TREE
19565              || (TREE_CODE (funcdecl) == FUNCTION_DECL
19566                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19567	  ? N_("AltiVec argument passed to unprototyped function")
19568	  : NULL;
19569}
19570
19571/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
19572   setup by using __stack_chk_fail_local hidden function instead of
19573   calling __stack_chk_fail directly.  Otherwise it is better to call
19574   __stack_chk_fail directly.  */
19575
19576static tree
19577rs6000_stack_protect_fail (void)
19578{
19579  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19580	 ? default_hidden_stack_protect_fail ()
19581	 : default_external_stack_protect_fail ();
19582}
19583
19584#include "gt-rs6000.h"
19585