/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
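
/* Note that, like any function-like macros, min and max evaluate their
   arguments more than once, so operands with side effects must be
   avoided: min (x++, y) would increment x twice.  */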

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
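
/* A rough sketch of how the fields above relate (the authoritative
   computation is rs6000_stack_info, declared below):

     total_size ~ fixed_size + vars_size + parm_size + save_size,

   rounded up to the ABI's stack alignment, where save_size covers the
   register save areas placed at the *_save_offset displacements from
   the incoming stack pointer.  */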

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
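
/* rs6000_override_options below walks these entries in order, and a
   later match overrides an earlier one: -mcpu= overrides --with-cpu=,
   and -mtune= (which sets only the tune field) overrides the tuning
   choice of both without touching the architecture flags.  */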

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double. */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of the label created for -mrelocatable; calling it lets
   us get the address of the GOT section.  */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures; these flags are
   decoded from the -malign-XXXXX string.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool abi;			/* True if -mabi=spe/nospe was used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used. */
  bool long_double;	        /* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */
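
/* Every entry below is scaled with COSTS_N_INSNS, i.e. expressed as a
   multiple of the cost of a single integer add.  For example, rios1_cost
   rates an SImode divide at COSTS_N_INSNS (19): nineteen times as
   expensive as an add.  */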

/* Instruction size costs on 32-bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64-bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};


static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_maybe_dead (rtx);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_indexed_address_p (rtx, int);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_fundamental_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static bool is_microcoded_insn (rtx);
static int is_dispatch_slot_restricted (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_use_sched_lookahead (void);
static tree rs6000_builtin_mask_for_load (void);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
					       tree, HOST_WIDE_INT,
					       rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
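
/* The param_is marker above lets the garbage collector walk the
   otherwise opaque htab_t: it records that each table entry points to
   a toc_hash_struct, so the RTX keys stored in the entries are kept
   live across collections.  */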

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
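
/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000
   (%v0 in the most significant bit) and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001
   (%v31 in the least significant bit).  */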

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef  TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order. The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus. Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;
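
/* TARGET_INITIALIZER fills in every hook of the target vector; any
   hook not #undef/#define'd above keeps the default supplied by
   target-def.h.  */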


/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This also excludes decimal float modes.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && !DECIMAL_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes go only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except in the general registers, and
     it must be able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
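
/* For illustration: on a 32-bit target TImode occupies four GPRs, so
   rs6000_hard_regno_mode_ok (10, TImode) holds (r10-r13 are all GPRs)
   while rs6000_hard_regno_mode_ok (30, TImode) fails, because the
   value would run past r31.  */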

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}
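
/* Precomputing this table turns the register/mode test into a simple
   array lookup for the rest of the compilation instead of a rerun of
   the checks in rs6000_hard_regno_mode_ok (the HARD_REGNO_MODE_OK
   macro in rs6000.h is expected to index rs6000_hard_regno_mode_ok_p
   directly).  */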

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"403", PROCESSOR_PPC403,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
	 {"405", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"405fp", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"440", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"440fp", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
	 {"601", PROCESSOR_PPC601,
	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"620", PROCESSOR_PPC620,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"630", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"8540", PROCESSOR_PPC8540,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
	 /* 8548 has a dummy entry for now.  */
	 {"8548", PROCESSOR_PPC8540,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"970", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"G5", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"power2", PROCESSOR_POWER,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"power3", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"power4", PROCESSOR_POWER4,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power5", PROCESSOR_POWER5,
	  POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	  | MASK_MFCRF | MASK_POPCNTB},
	 {"power5+", PROCESSOR_POWER5,
	  POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	  | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
	 {"power6", PROCESSOR_POWER5,
	  POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
	  | MASK_FPRND},
	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
	 {"powerpc64", PROCESSOR_POWERPC64,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios2", PROCESSOR_RIOS2,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rs64", PROCESSOR_RS64A,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
		     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
		     | MASK_DLMZB)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't let the processor default override flags that were given
     explicitly on the command line.  */
  set_masks &= ~target_flags_explicit;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags &= ~set_masks;
		    target_flags |= (processor_target_table[j].target_enable
				     & set_masks);
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode and they cause an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning (0, "-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning (0, "-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
	       rs6000_traceback_name);
    }

  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

#ifndef POWERPC_LINUX
  if (!rs6000_explicit_options.ieee)
    rs6000_ieeequad = 1;
#endif

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }

  /* Place FP constants in the constant pool instead of the TOC
     if section anchors are enabled.  */
  if (flag_section_anchors)
    TARGET_NO_FP_IN_TOC = 1;

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
	error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
	 MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
	target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
	 default, so let's unset them if we manually set another
	 CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
	rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
	rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
	rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
	rs6000_isel = 0;
      if (!rs6000_explicit_options.long_double)
	rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
			&& rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
			 || rs6000_cpu == PROCESSOR_POWER5);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
	rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
	rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
	rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
	rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
	rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
	rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
	rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
	rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
	rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      if (rs6000_sched_groups)
	{
	  if (align_functions <= 0)
	    align_functions = 16;
	  if (align_jumps <= 0)
	    align_jumps = 16;
	  if (align_loops <= 0)
	    align_loops = 16;
	}
      if (align_jumps_max_skip <= 0)
	align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
	align_loops_max_skip = 15;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;

  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
	rs6000_cost = &rios1_cost;
	break;

      case PROCESSOR_RIOS2:
	rs6000_cost = &rios2_cost;
	break;

      case PROCESSOR_RS64A:
	rs6000_cost = &rs64a_cost;
	break;

      case PROCESSOR_MPCCORE:
	rs6000_cost = &mpccore_cost;
	break;

      case PROCESSOR_PPC403:
	rs6000_cost = &ppc403_cost;
	break;

      case PROCESSOR_PPC405:
	rs6000_cost = &ppc405_cost;
	break;

      case PROCESSOR_PPC440:
	rs6000_cost = &ppc440_cost;
	break;

      case PROCESSOR_PPC601:
	rs6000_cost = &ppc601_cost;
	break;

      case PROCESSOR_PPC603:
	rs6000_cost = &ppc603_cost;
	break;

      case PROCESSOR_PPC604:
	rs6000_cost = &ppc604_cost;
	break;

      case PROCESSOR_PPC604e:
	rs6000_cost = &ppc604e_cost;
	break;

      case PROCESSOR_PPC620:
	rs6000_cost = &ppc620_cost;
	break;

      case PROCESSOR_PPC630:
	rs6000_cost = &ppc630_cost;
	break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
	rs6000_cost = &ppc750_cost;
	break;
1561	break;
1562
1563      case PROCESSOR_PPC7450:
1564	rs6000_cost = &ppc7450_cost;
1565	break;
1566
1567      case PROCESSOR_PPC8540:
1568	rs6000_cost = &ppc8540_cost;
1569	break;
1570
1571      case PROCESSOR_POWER4:
1572      case PROCESSOR_POWER5:
1573	rs6000_cost = &power4_cost;
1574	break;
1575
1576      default:
1577	gcc_unreachable ();
1578      }
1579}
1580
1581/* Implement targetm.vectorize.builtin_mask_for_load.  */
1582static tree
1583rs6000_builtin_mask_for_load (void)
1584{
1585  if (TARGET_ALTIVEC)
1586    return altivec_builtin_mask_for_load;
1587  else
1588    return 0;
1589}
1590
1591
1592/* Return true iff a data reference of TYPE can reach vector alignment (16)
1593   after applying N iterations.  This routine does not determine how many
1594   iterations are required to reach the desired alignment.  */
1595
1596static bool
1597rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
1598{
1599  if (is_packed)
1600    return false;
1601
1602  if (TARGET_32BIT)
1603    {
1604      if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1605        return true;
1606
1607      if (rs6000_alignment_flags ==  MASK_ALIGN_POWER)
1608        return true;
1609
1610      return false;
1611    }
1612  else
1613    {
1614      if (TARGET_MACHO)
1615        return false;
1616
1617      /* Assume that all other types are naturally aligned.  CHECKME!  */
1618      return true;
1619    }
1620}
1621
1622/* Handle generic options of the form -mfoo=yes/no.
1623   NAME is the option name.
1624   VALUE is the option value.
1625   FLAG is a pointer to the flag in which to store a 1 or 0, depending on
1626   whether the option value is 'yes' or 'no' respectively.  */
1627static void
1628rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1629{
1630  if (value == 0)
1631    return;
1632  else if (!strcmp (value, "yes"))
1633    *flag = 1;
1634  else if (!strcmp (value, "no"))
1635    *flag = 0;
1636  else
1637    error ("unknown -m%s= option specified: '%s'", name, value);
1638}
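
/* A minimal usage sketch (editorial addition, hypothetical code): with the
   handling above, "yes" and "no" are the only accepted values, anything
   else is diagnosed, and a missing value is silently ignored.  */
#if 0
  int flag = -1;
  rs6000_parse_yes_no_option ("vrsave", "yes", &flag);   /* *flag == 1 */
  rs6000_parse_yes_no_option ("vrsave", "no", &flag);    /* *flag == 0 */
  rs6000_parse_yes_no_option ("vrsave", "maybe", &flag); /* error ()   */
  rs6000_parse_yes_no_option ("vrsave", 0, &flag);       /* no-op      */
#endif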
1639
1640/* Validate and record the size specified with the -mtls-size option.  */
1641
1642static void
1643rs6000_parse_tls_size_option (void)
1644{
1645  if (rs6000_tls_size_string == 0)
1646    return;
1647  else if (strcmp (rs6000_tls_size_string, "16") == 0)
1648    rs6000_tls_size = 16;
1649  else if (strcmp (rs6000_tls_size_string, "32") == 0)
1650    rs6000_tls_size = 32;
1651  else if (strcmp (rs6000_tls_size_string, "64") == 0)
1652    rs6000_tls_size = 64;
1653  else
1654    error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
1655}
1656
1657void
1658optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1659{
1660  if (DEFAULT_ABI == ABI_DARWIN)
1661    /* The Darwin libraries never set errno, so we might as well
1662       avoid calling them when that's the only reason we would.  */
1663    flag_errno_math = 0;
1664
1665  /* Double growth factor to counter reduced min jump length.  */
1666  set_param_value ("max-grow-copy-bb-insns", 16);
1667
1668  /* Enable section anchors by default.
1669     Skip section anchors for Objective C and Objective C++
1670     until the front ends are fixed.  */
1671  if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
1672    flag_section_anchors = 1;
1673}
1674
1675/* Implement TARGET_HANDLE_OPTION.  */
1676
1677static bool
1678rs6000_handle_option (size_t code, const char *arg, int value)
1679{
1680  switch (code)
1681    {
1682    case OPT_mno_power:
1683      target_flags &= ~(MASK_POWER | MASK_POWER2
1684			| MASK_MULTIPLE | MASK_STRING);
1685      target_flags_explicit |= (MASK_POWER | MASK_POWER2
1686				| MASK_MULTIPLE | MASK_STRING);
1687      break;
1688    case OPT_mno_powerpc:
1689      target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1690			| MASK_PPC_GFXOPT | MASK_POWERPC64);
1691      target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1692				| MASK_PPC_GFXOPT | MASK_POWERPC64);
1693      break;
1694    case OPT_mfull_toc:
1695      target_flags &= ~MASK_MINIMAL_TOC;
1696      TARGET_NO_FP_IN_TOC = 0;
1697      TARGET_NO_SUM_IN_TOC = 0;
1698      target_flags_explicit |= MASK_MINIMAL_TOC;
1699#ifdef TARGET_USES_SYSV4_OPT
1700      /* Note that V.4 no longer uses a normal TOC, so make -mfull-toc
1701	 behave just the same as -mminimal-toc.  */
1702      target_flags |= MASK_MINIMAL_TOC;
1703      target_flags_explicit |= MASK_MINIMAL_TOC;
1704#endif
1705      break;
1706
1707#ifdef TARGET_USES_SYSV4_OPT
1708    case OPT_mtoc:
1709      /* Make -mtoc behave like -mminimal-toc.  */
1710      target_flags |= MASK_MINIMAL_TOC;
1711      target_flags_explicit |= MASK_MINIMAL_TOC;
1712      break;
1713#endif
1714
1715#ifdef TARGET_USES_AIX64_OPT
1716    case OPT_maix64:
1717#else
1718    case OPT_m64:
1719#endif
1720      target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1721      target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1722      target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
1723      break;
1724
1725#ifdef TARGET_USES_AIX64_OPT
1726    case OPT_maix32:
1727#else
1728    case OPT_m32:
1729#endif
1730      target_flags &= ~MASK_POWERPC64;
1731      target_flags_explicit |= MASK_POWERPC64;
1732      break;
1733
1734    case OPT_minsert_sched_nops_:
1735      rs6000_sched_insert_nops_str = arg;
1736      break;
1737
1738    case OPT_mminimal_toc:
1739      if (value == 1)
1740	{
1741	  TARGET_NO_FP_IN_TOC = 0;
1742	  TARGET_NO_SUM_IN_TOC = 0;
1743	}
1744      break;
1745
1746    case OPT_mpower:
1747      if (value == 1)
1748	{
1749	  target_flags |= (MASK_MULTIPLE | MASK_STRING);
1750	  target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1751	}
1752      break;
1753
1754    case OPT_mpower2:
1755      if (value == 1)
1756	{
1757	  target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1758	  target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1759	}
1760      break;
1761
1762    case OPT_mpowerpc_gpopt:
1763    case OPT_mpowerpc_gfxopt:
1764      if (value == 1)
1765	{
1766	  target_flags |= MASK_POWERPC;
1767	  target_flags_explicit |= MASK_POWERPC;
1768	}
1769      break;
1770
1771    case OPT_maix_struct_return:
1772    case OPT_msvr4_struct_return:
1773      rs6000_explicit_options.aix_struct_ret = true;
1774      break;
1775
1776    case OPT_mvrsave_:
1777      rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1778      break;
1779
1780    case OPT_misel_:
1781      rs6000_explicit_options.isel = true;
1782      rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1783      break;
1784
1785    case OPT_mspe_:
1786      rs6000_explicit_options.spe = true;
1787      rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1788      /* No SPE means 64-bit long doubles, even if an E500.  */
1789      if (!rs6000_spe)
1790	rs6000_long_double_type_size = 64;
1791      break;
1792
1793    case OPT_mdebug_:
1794      rs6000_debug_name = arg;
1795      break;
1796
1797#ifdef TARGET_USES_SYSV4_OPT
1798    case OPT_mcall_:
1799      rs6000_abi_name = arg;
1800      break;
1801
1802    case OPT_msdata_:
1803      rs6000_sdata_name = arg;
1804      break;
1805
1806    case OPT_mtls_size_:
1807      rs6000_tls_size_string = arg;
1808      break;
1809
1810    case OPT_mrelocatable:
1811      if (value == 1)
1812	{
1813	  target_flags |= MASK_MINIMAL_TOC;
1814	  target_flags_explicit |= MASK_MINIMAL_TOC;
1815	  TARGET_NO_FP_IN_TOC = 1;
1816	}
1817      break;
1818
1819    case OPT_mrelocatable_lib:
1820      if (value == 1)
1821	{
1822	  target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1823	  target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1824	  TARGET_NO_FP_IN_TOC = 1;
1825	}
1826      else
1827	{
1828	  target_flags &= ~MASK_RELOCATABLE;
1829	  target_flags_explicit |= MASK_RELOCATABLE;
1830	}
1831      break;
1832#endif
1833
1834    case OPT_mabi_:
1835      if (!strcmp (arg, "altivec"))
1836	{
1837	  rs6000_explicit_options.abi = true;
1838	  rs6000_altivec_abi = 1;
1839	  rs6000_spe_abi = 0;
1840	}
1841      else if (! strcmp (arg, "no-altivec"))
1842	{
1843	  /* ??? Don't set rs6000_explicit_options.abi here, to allow
1844	     the default for rs6000_spe_abi to be chosen later.  */
1845	  rs6000_altivec_abi = 0;
1846	}
1847      else if (! strcmp (arg, "spe"))
1848	{
1849	  rs6000_explicit_options.abi = true;
1850	  rs6000_spe_abi = 1;
1851	  rs6000_altivec_abi = 0;
1852	  if (!TARGET_SPE_ABI)
1853	    error ("not configured for ABI: '%s'", arg);
1854	}
1855      else if (! strcmp (arg, "no-spe"))
1856	{
1857	  rs6000_explicit_options.abi = true;
1858	  rs6000_spe_abi = 0;
1859	}
1860
1861      /* These are here for testing during development only; please
1862	 do not document them in the manual.  */
1863      else if (! strcmp (arg, "d64"))
1864	{
1865	  rs6000_darwin64_abi = 1;
1866	  warning (0, "Using darwin64 ABI");
1867	}
1868      else if (! strcmp (arg, "d32"))
1869	{
1870	  rs6000_darwin64_abi = 0;
1871	  warning (0, "Using old darwin ABI");
1872	}
1873
1874      else if (! strcmp (arg, "ibmlongdouble"))
1875	{
1876	  rs6000_explicit_options.ieee = true;
1877	  rs6000_ieeequad = 0;
1878	  warning (0, "Using IBM extended precision long double");
1879	}
1880      else if (! strcmp (arg, "ieeelongdouble"))
1881	{
1882	  rs6000_explicit_options.ieee = true;
1883	  rs6000_ieeequad = 1;
1884	  warning (0, "Using IEEE extended precision long double");
1885	}
1886
1887      else
1888	{
1889	  error ("unknown ABI specified: '%s'", arg);
1890	  return false;
1891	}
1892      break;
1893
1894    case OPT_mcpu_:
1895      rs6000_select[1].string = arg;
1896      break;
1897
1898    case OPT_mtune_:
1899      rs6000_select[2].string = arg;
1900      break;
1901
1902    case OPT_mtraceback_:
1903      rs6000_traceback_name = arg;
1904      break;
1905
1906    case OPT_mfloat_gprs_:
1907      rs6000_explicit_options.float_gprs = true;
1908      if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1909	rs6000_float_gprs = 1;
1910      else if (! strcmp (arg, "double"))
1911	rs6000_float_gprs = 2;
1912      else if (! strcmp (arg, "no"))
1913	rs6000_float_gprs = 0;
1914      else
1915	{
1916	  error ("invalid option for -mfloat-gprs: '%s'", arg);
1917	  return false;
1918	}
1919      break;
1920
1921    case OPT_mlong_double_:
1922      rs6000_explicit_options.long_double = true;
1923      rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1924      if (value != 64 && value != 128)
1925	{
1926	  error ("Unknown switch -mlong-double-%s", arg);
1927	  rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1928	  return false;
1929	}
1930      else
1931	rs6000_long_double_type_size = value;
1932      break;
1933
1934    case OPT_msched_costly_dep_:
1935      rs6000_sched_costly_dep_str = arg;
1936      break;
1937
1938    case OPT_malign_:
1939      rs6000_explicit_options.alignment = true;
1940      if (! strcmp (arg, "power"))
1941	{
1942	  /* On 64-bit Darwin, power alignment is ABI-incompatible with
1943	     some C library functions, so warn about it. The flag may be
1944	     useful for performance studies from time to time though, so
1945	     don't disable it entirely.  */
1946	  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1947	    warning (0, "-malign-power is not supported for 64-bit Darwin;"
1948		     " it is incompatible with the installed C and C++ libraries");
1949	  rs6000_alignment_flags = MASK_ALIGN_POWER;
1950	}
1951      else if (! strcmp (arg, "natural"))
1952	rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1953      else
1954	{
1955	  error ("unknown -malign-XXXXX option specified: '%s'", arg);
1956	  return false;
1957	}
1958      break;
1959    }
1960  return true;
1961}
1962
1963/* Do anything needed at the start of the asm file.  */
1964
1965static void
1966rs6000_file_start (void)
1967{
1968  size_t i;
1969  char buffer[80];
1970  const char *start = buffer;
1971  struct rs6000_cpu_select *ptr;
1972  const char *default_cpu = TARGET_CPU_DEFAULT;
1973  FILE *file = asm_out_file;
1974
1975  default_file_start ();
1976
1977#ifdef TARGET_BI_ARCH
1978  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1979    default_cpu = 0;
1980#endif
1981
1982  if (flag_verbose_asm)
1983    {
1984      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1985      rs6000_select[0].string = default_cpu;
1986
1987      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1988	{
1989	  ptr = &rs6000_select[i];
1990	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1991	    {
1992	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1993	      start = "";
1994	    }
1995	}
1996
1997      if (PPC405_ERRATUM77)
1998	{
1999	  fprintf (file, "%s PPC405CR_ERRATUM77", start);
2000	  start = "";
2001	}
2002
2003#ifdef USING_ELFOS_H
2004      switch (rs6000_sdata)
2005	{
2006	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2007	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2008	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2009	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2010	}
2011
2012      if (rs6000_sdata && g_switch_value)
2013	{
2014	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2015		   g_switch_value);
2016	  start = "";
2017	}
2018#endif
2019
2020      if (*start == '\0')
2021	putc ('\n', file);
2022    }
2023
2024  if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2025    {
2026      switch_to_section (toc_section);
2027      switch_to_section (text_section);
2028    }
2029}
2030
2031
2032/* Return nonzero if this function is known to have a null epilogue.  */
2033
2034int
2035direct_return (void)
2036{
2037  if (reload_completed)
2038    {
2039      rs6000_stack_t *info = rs6000_stack_info ();
2040
2041      if (info->first_gp_reg_save == 32
2042	  && info->first_fp_reg_save == 64
2043	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
2044	  && ! info->lr_save_p
2045	  && ! info->cr_save_p
2046	  && info->vrsave_mask == 0
2047	  && ! info->push_p)
2048	return 1;
2049    }
2050
2051  return 0;
2052}
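
/* A minimal sketch (editorial addition, hypothetical code) of the kind of
   function direct_return reports on: a leaf that saves no GPRs, FPRs,
   AltiVec registers, LR or CR, and allocates no stack, so its epilogue
   is just a blr.  */
#if 0
int
example_leaf_add (int a, int b)
{
  return a + b;	/* null epilogue: direct_return () == 1 after reload */
}
#endif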
2053
2054/* Return the number of instructions it takes to form a constant in an
2055   integer register.  */
2056
2057int
2058num_insns_constant_wide (HOST_WIDE_INT value)
2059{
2060  /* signed constant loadable with {cal|addi} */
2061  if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
2062    return 1;
2063
2064  /* constant loadable with {cau|addis} */
2065  else if ((value & 0xffff) == 0
2066	   && (value >> 31 == -1 || value >> 31 == 0))
2067    return 1;
2068
2069#if HOST_BITS_PER_WIDE_INT == 64
2070  else if (TARGET_POWERPC64)
2071    {
2072      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2073      HOST_WIDE_INT high = value >> 31;
2074
2075      if (high == 0 || high == -1)
2076	return 2;
2077
2078      high >>= 1;
2079
2080      if (low == 0)
2081	return num_insns_constant_wide (high) + 1;
2082      else
2083	return (num_insns_constant_wide (high)
2084		+ num_insns_constant_wide (low) + 1);
2085    }
2086#endif
2087
2088  else
2089    return 2;
2090}
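
/* Editorial sketch (hypothetical spot checks, not part of the original
   source): expected results of the function above for a few sample
   constants, derived from the cases it handles; the mnemonics note the
   usual way each constant is materialized.  */
#if 0
static void
example_constant_costs (void)
{
  gcc_assert (num_insns_constant_wide (0x7fff) == 1);      /* li        */
  gcc_assert (num_insns_constant_wide (-32768) == 1);      /* li        */
  gcc_assert (num_insns_constant_wide (0x12340000) == 1);  /* lis       */
  gcc_assert (num_insns_constant_wide (0x12345678) == 2);  /* lis + ori */
#if HOST_BITS_PER_WIDE_INT == 64
  /* With TARGET_POWERPC64 the low half is 0, so one insn for the high
     part plus one shift: li rD,1; sldi rD,rD,32.  */
  gcc_assert (num_insns_constant_wide ((HOST_WIDE_INT) 1 << 32) == 2);
#endif
}
#endif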
2091
2092int
2093num_insns_constant (rtx op, enum machine_mode mode)
2094{
2095  HOST_WIDE_INT low, high;
2096
2097  switch (GET_CODE (op))
2098    {
2099    case CONST_INT:
2100#if HOST_BITS_PER_WIDE_INT == 64
2101      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2102	  && mask64_operand (op, mode))
2103	return 2;
2104      else
2105#endif
2106	return num_insns_constant_wide (INTVAL (op));
2107
2108      case CONST_DOUBLE:
2109	if (mode == SFmode)
2110	  {
2111	    long l;
2112	    REAL_VALUE_TYPE rv;
2113
2114	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2115	    REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2116	    return num_insns_constant_wide ((HOST_WIDE_INT) l);
2117	  }
2118
2119	if (mode == VOIDmode || mode == DImode)
2120	  {
2121	    high = CONST_DOUBLE_HIGH (op);
2122	    low  = CONST_DOUBLE_LOW (op);
2123	  }
2124	else
2125	  {
2126	    long l[2];
2127	    REAL_VALUE_TYPE rv;
2128
2129	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2130	    REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2131	    high = l[WORDS_BIG_ENDIAN == 0];
2132	    low  = l[WORDS_BIG_ENDIAN != 0];
2133	  }
2134
2135	if (TARGET_32BIT)
2136	  return (num_insns_constant_wide (low)
2137		  + num_insns_constant_wide (high));
2138	else
2139	  {
2140	    if ((high == 0 && low >= 0)
2141		|| (high == -1 && low < 0))
2142	      return num_insns_constant_wide (low);
2143
2144	    else if (mask64_operand (op, mode))
2145	      return 2;
2146
2147	    else if (low == 0)
2148	      return num_insns_constant_wide (high) + 1;
2149
2150	    else
2151	      return (num_insns_constant_wide (high)
2152		      + num_insns_constant_wide (low) + 1);
2153	  }
2154
2155    default:
2156      gcc_unreachable ();
2157    }
2158}
2159
2160/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2161   If the mode of OP is MODE_VECTOR_INT, this simply returns the
2162   corresponding element of the vector, but for V4SFmode and V2SFmode,
2163   the corresponding "float" is interpreted as an SImode integer.  */
2164
2165static HOST_WIDE_INT
2166const_vector_elt_as_int (rtx op, unsigned int elt)
2167{
2168  rtx tmp = CONST_VECTOR_ELT (op, elt);
2169  if (GET_MODE (op) == V4SFmode
2170      || GET_MODE (op) == V2SFmode)
2171    tmp = gen_lowpart (SImode, tmp);
2172  return INTVAL (tmp);
2173}
2174
2175/* Return true if OP can be synthesized with a particular vspltisb, vspltish
2176   or vspltisw instruction.  OP is a CONST_VECTOR.  Which instruction is used
2177   depends on STEP and COPIES, one of which will be 1.  If COPIES > 1,
2178   all items are set to the same value and contain COPIES replicas of the
2179   vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2180   operand and the others are set to the value of the operand's msb.  */
2181
2182static bool
2183vspltis_constant (rtx op, unsigned step, unsigned copies)
2184{
2185  enum machine_mode mode = GET_MODE (op);
2186  enum machine_mode inner = GET_MODE_INNER (mode);
2187
2188  unsigned i;
2189  unsigned nunits = GET_MODE_NUNITS (mode);
2190  unsigned bitsize = GET_MODE_BITSIZE (inner);
2191  unsigned mask = GET_MODE_MASK (inner);
2192
2193  HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
2194  HOST_WIDE_INT splat_val = val;
2195  HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2196
2197  /* Construct the value to be splatted, if possible.  If not, return 0.  */
2198  for (i = 2; i <= copies; i *= 2)
2199    {
2200      HOST_WIDE_INT small_val;
2201      bitsize /= 2;
2202      small_val = splat_val >> bitsize;
2203      mask >>= bitsize;
2204      if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2205	return false;
2206      splat_val = small_val;
2207    }
2208
2209  /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw].  */
2210  if (EASY_VECTOR_15 (splat_val))
2211    ;
2212
2213  /* Also check if we can splat, and then add the result to itself.  Do so if
2214     the value is positive, or if the splat instruction is using OP's mode;
2215     for splat_val < 0, the splat and the add should use the same mode.  */
2216  else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2217           && (splat_val >= 0 || (step == 1 && copies == 1)))
2218    ;
2219
2220  else
2221    return false;
2222
2223  /* Check if VAL is present in every STEP-th element, and the
2224     other elements are filled with its most significant bit.  */
2225  for (i = 0; i < nunits - 1; ++i)
2226    {
2227      HOST_WIDE_INT desired_val;
2228      if (((i + 1) & (step - 1)) == 0)
2229	desired_val = val;
2230      else
2231	desired_val = msb_val;
2232
2233      if (desired_val != const_vector_elt_as_int (op, i))
2234	return false;
2235    }
2236
2237  return true;
2238}
2239
2240
2241/* Return true if OP is of the given MODE and can be synthesized
2242   with a vspltisb, vspltish or vspltisw.  */
2243
2244bool
2245easy_altivec_constant (rtx op, enum machine_mode mode)
2246{
2247  unsigned step, copies;
2248
2249  if (mode == VOIDmode)
2250    mode = GET_MODE (op);
2251  else if (mode != GET_MODE (op))
2252    return false;
2253
2254  /* Start with a vspltisw.  */
2255  step = GET_MODE_NUNITS (mode) / 4;
2256  copies = 1;
2257
2258  if (vspltis_constant (op, step, copies))
2259    return true;
2260
2261  /* Then try with a vspltish.  */
2262  if (step == 1)
2263    copies <<= 1;
2264  else
2265    step >>= 1;
2266
2267  if (vspltis_constant (op, step, copies))
2268    return true;
2269
2270  /* And finally a vspltisb.  */
2271  if (step == 1)
2272    copies <<= 1;
2273  else
2274    step >>= 1;
2275
2276  if (vspltis_constant (op, step, copies))
2277    return true;
2278
2279  return false;
2280}
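
/* Worked examples (editorial addition) of the step/copies ladder above,
   assuming the usual element counts: a V4SImode { 5, 5, 5, 5 } is caught
   by the initial vspltisw attempt; a V8HImode vector of eight 7s fails
   that check but succeeds on the vspltish retry (step halves from 2 to
   1); a V16QImode vector of sixteen 3s is only matched by the final
   vspltisb attempt.  A hypothetical spot check for the first case:  */
#if 0
static void
example_easy_constant (void)
{
  rtx v = gen_rtx_CONST_VECTOR (V4SImode,
				gen_rtvec (4, GEN_INT (5), GEN_INT (5),
					   GEN_INT (5), GEN_INT (5)));
  gcc_assert (easy_altivec_constant (v, V4SImode));
}
#endif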
2281
2282/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2283   result is OP.  Abort if it is not possible.  */
2284
2285rtx
2286gen_easy_altivec_constant (rtx op)
2287{
2288  enum machine_mode mode = GET_MODE (op);
2289  int nunits = GET_MODE_NUNITS (mode);
2290  rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2291  unsigned step = nunits / 4;
2292  unsigned copies = 1;
2293
2294  /* Start with a vspltisw.  */
2295  if (vspltis_constant (op, step, copies))
2296    return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2297
2298  /* Then try with a vspltish.  */
2299  if (step == 1)
2300    copies <<= 1;
2301  else
2302    step >>= 1;
2303
2304  if (vspltis_constant (op, step, copies))
2305    return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2306
2307  /* And finally a vspltisb.  */
2308  if (step == 1)
2309    copies <<= 1;
2310  else
2311    step >>= 1;
2312
2313  if (vspltis_constant (op, step, copies))
2314    return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2315
2316  gcc_unreachable ();
2317}
2318
2319const char *
2320output_vec_const_move (rtx *operands)
2321{
2322  int cst, cst2;
2323  enum machine_mode mode;
2324  rtx dest, vec;
2325
2326  dest = operands[0];
2327  vec = operands[1];
2328  mode = GET_MODE (dest);
2329
2330  if (TARGET_ALTIVEC)
2331    {
2332      rtx splat_vec;
2333      if (zero_constant (vec, mode))
2334	return "vxor %0,%0,%0";
2335
2336      splat_vec = gen_easy_altivec_constant (vec);
2337      gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2338      operands[1] = XEXP (splat_vec, 0);
2339      if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2340	return "#";
2341
2342      switch (GET_MODE (splat_vec))
2343	{
2344	case V4SImode:
2345	  return "vspltisw %0,%1";
2346
2347	case V8HImode:
2348	  return "vspltish %0,%1";
2349
2350	case V16QImode:
2351	  return "vspltisb %0,%1";
2352
2353	default:
2354	  gcc_unreachable ();
2355	}
2356    }
2357
2358  gcc_assert (TARGET_SPE);
2359
2360  /* Vector constant 0 is handled as a splitter of V2SI, and in the
2361     pattern of V1DI, V4HI, and V2SF.
2362
2363     FIXME: We should probably return # and add post reload
2364     splitters for these, but this way is so easy ;-).  */
2365  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2366  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2367  operands[1] = CONST_VECTOR_ELT (vec, 0);
2368  operands[2] = CONST_VECTOR_ELT (vec, 1);
2369  if (cst == cst2)
2370    return "li %0,%1\n\tevmergelo %0,%0,%0";
2371  else
2372    return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2373}
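
/* Illustrative outputs (editorial addition) of the template selection
   above, assuming TARGET_ALTIVEC for the first two rows and TARGET_SPE
   for the last:

     V4SImode { 0, 0, 0, 0 }  ->  "vxor %0,%0,%0"
     V4SImode { 5, 5, 5, 5 }  ->  "vspltisw %0,5"
     V2SImode { 7, 7 }        ->  "li %0,7\n\tevmergelo %0,%0,%0"  */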
2374
2375/* Initialize vector TARGET to VALS.  */
2376
2377void
2378rs6000_expand_vector_init (rtx target, rtx vals)
2379{
2380  enum machine_mode mode = GET_MODE (target);
2381  enum machine_mode inner_mode = GET_MODE_INNER (mode);
2382  int n_elts = GET_MODE_NUNITS (mode);
2383  int n_var = 0, one_var = -1;
2384  bool all_same = true, all_const_zero = true;
2385  rtx x, mem;
2386  int i;
2387
2388  for (i = 0; i < n_elts; ++i)
2389    {
2390      x = XVECEXP (vals, 0, i);
2391      if (!CONSTANT_P (x))
2392	++n_var, one_var = i;
2393      else if (x != CONST0_RTX (inner_mode))
2394	all_const_zero = false;
2395
2396      if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2397	all_same = false;
2398    }
2399
2400  if (n_var == 0)
2401    {
2402      if (mode != V4SFmode && all_const_zero)
2403	{
2404	  /* Zero register.  */
2405	  emit_insn (gen_rtx_SET (VOIDmode, target,
2406				  gen_rtx_XOR (mode, target, target)));
2407	  return;
2408	}
2409      else if (mode != V4SFmode && easy_vector_constant (vals, mode))
2410	{
2411	  /* Splat immediate.  */
2412	  emit_insn (gen_rtx_SET (VOIDmode, target, vals));
2413	  return;
2414	}
2415      else if (all_same)
2416	;	/* Splat vector element.  */
2417      else
2418	{
2419	  /* Load from constant pool.  */
2420	  emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2421	  return;
2422	}
2423    }
2424
2425  /* Store value to stack temp.  Load vector element.  Splat.  */
2426  if (all_same)
2427    {
2428      mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2429      emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2430		      XVECEXP (vals, 0, 0));
2431      x = gen_rtx_UNSPEC (VOIDmode,
2432			  gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2433      emit_insn (gen_rtx_PARALLEL (VOIDmode,
2434				   gen_rtvec (2,
2435					      gen_rtx_SET (VOIDmode,
2436							   target, mem),
2437					      x)));
2438      x = gen_rtx_VEC_SELECT (inner_mode, target,
2439			      gen_rtx_PARALLEL (VOIDmode,
2440						gen_rtvec (1, const0_rtx)));
2441      emit_insn (gen_rtx_SET (VOIDmode, target,
2442			      gen_rtx_VEC_DUPLICATE (mode, x)));
2443      return;
2444    }
2445
2446  /* One field is non-constant.  Load constant then overwrite
2447     varying field.  */
2448  if (n_var == 1)
2449    {
2450      rtx copy = copy_rtx (vals);
2451
2452      /* Load constant part of vector, substitute neighboring value for
2453	 varying element.  */
2454      XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2455      rs6000_expand_vector_init (target, copy);
2456
2457      /* Insert variable.  */
2458      rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2459      return;
2460    }
2461
2462  /* Construct the vector in memory one field at a time
2463     and load the whole vector.  */
2464  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2465  for (i = 0; i < n_elts; i++)
2466    emit_move_insn (adjust_address_nv (mem, inner_mode,
2467				    i * GET_MODE_SIZE (inner_mode)),
2468		    XVECEXP (vals, 0, i));
2469  emit_move_insn (target, mem);
2470}
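
/* A quick map (editorial addition) of which of the paths above a few
   V4SImode initializers take, with a, b, c, d standing for non-constant
   values:

     { 0, 0, 0, 0 }  zero register (vxor).
     { 5, 5, 5, 5 }  splat immediate (easy_vector_constant).
     { 9, 1, 2, 3 }  load from the constant pool.
     { a, a, a, a }  store a to a stack temp, lve load, then vec_duplicate.
     { a, 1, 2, 3 }  load { 1, 1, 2, 3 } from the pool, then insert a via
		     rs6000_expand_vector_set (the n_var == 1 path).
     { a, b, c, d }  build the vector in a stack temp field by field.  */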
2471
2472/* Set field ELT of TARGET to VAL.  */
2473
2474void
2475rs6000_expand_vector_set (rtx target, rtx val, int elt)
2476{
2477  enum machine_mode mode = GET_MODE (target);
2478  enum machine_mode inner_mode = GET_MODE_INNER (mode);
2479  rtx reg = gen_reg_rtx (mode);
2480  rtx mask, mem, x;
2481  int width = GET_MODE_SIZE (inner_mode);
2482  int i;
2483
2484  /* Load single variable value.  */
2485  mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2486  emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2487  x = gen_rtx_UNSPEC (VOIDmode,
2488		      gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2489  emit_insn (gen_rtx_PARALLEL (VOIDmode,
2490			       gen_rtvec (2,
2491					  gen_rtx_SET (VOIDmode,
2492						       reg, mem),
2493					  x)));
2494
2495  /* Linear sequence.  */
2496  mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2497  for (i = 0; i < 16; ++i)
2498    XVECEXP (mask, 0, i) = GEN_INT (i);
2499
2500  /* Set permute mask to insert element into target.  */
2501  for (i = 0; i < width; ++i)
2502    XVECEXP (mask, 0, elt*width + i)
2503      = GEN_INT (i + 0x10);
2504  x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2505  x = gen_rtx_UNSPEC (mode,
2506		      gen_rtvec (3, target, reg,
2507				 force_reg (V16QImode, x)),
2508		      UNSPEC_VPERM);
2509  emit_insn (gen_rtx_SET (VOIDmode, target, x));
2510}
2511
2512/* Extract field ELT from VEC into TARGET.  */
2513
2514void
2515rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2516{
2517  enum machine_mode mode = GET_MODE (vec);
2518  enum machine_mode inner_mode = GET_MODE_INNER (mode);
2519  rtx mem, x;
2520
2521  /* Allocate mode-sized buffer.  */
2522  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2523
2524  /* Add offset to field within buffer matching vector element.  */
2525  mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2526
2527  /* Store single field into mode-sized buffer.  */
2528  x = gen_rtx_UNSPEC (VOIDmode,
2529		      gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2530  emit_insn (gen_rtx_PARALLEL (VOIDmode,
2531			       gen_rtvec (2,
2532					  gen_rtx_SET (VOIDmode,
2533						       mem, vec),
2534					  x)));
2535  emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2536}
2537
2538/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2539   implement ANDing by the mask IN.  */
2540void
2541build_mask64_2_operands (rtx in, rtx *out)
2542{
2543#if HOST_BITS_PER_WIDE_INT >= 64
2544  unsigned HOST_WIDE_INT c, lsb, m1, m2;
2545  int shift;
2546
2547  gcc_assert (GET_CODE (in) == CONST_INT);
2548
2549  c = INTVAL (in);
2550  if (c & 1)
2551    {
2552      /* Assume c initially something like 0x00fff000000fffff.  The idea
2553	 is to rotate the word so that the middle ^^^^^^ group of zeros
2554	 is at the MS end and can be cleared with an rldicl mask.  We then
2555	 rotate back and clear off the MS    ^^ group of zeros with a
2556	 second rldicl.  */
2557      c = ~c;			/*   c == 0xff000ffffff00000 */
2558      lsb = c & -c;		/* lsb == 0x0000000000100000 */
2559      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
2560      c = ~c;			/*   c == 0x00fff000000fffff */
2561      c &= -lsb;		/*   c == 0x00fff00000000000 */
2562      lsb = c & -c;		/* lsb == 0x0000100000000000 */
2563      c = ~c;			/*   c == 0xff000fffffffffff */
2564      c &= -lsb;		/*   c == 0xff00000000000000 */
2565      shift = 0;
2566      while ((lsb >>= 1) != 0)
2567	shift++;		/* shift == 44 on exit from loop */
2568      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
2569      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
2570      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
2571    }
2572  else
2573    {
2574      /* Assume c initially something like 0xff000f0000000000.  The idea
2575	 is to rotate the word so that the     ^^^  middle group of zeros
2576	 is at the LS end and can be cleared with an rldicr mask.  We then
2577	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2578	 a second rldicr.  */
2579      lsb = c & -c;		/* lsb == 0x0000010000000000 */
2580      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
2581      c = ~c;			/*   c == 0x00fff0ffffffffff */
2582      c &= -lsb;		/*   c == 0x00fff00000000000 */
2583      lsb = c & -c;		/* lsb == 0x0000100000000000 */
2584      c = ~c;			/*   c == 0xff000fffffffffff */
2585      c &= -lsb;		/*   c == 0xff00000000000000 */
2586      shift = 0;
2587      while ((lsb >>= 1) != 0)
2588	shift++;		/* shift == 44 on exit from loop */
2589      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
2590      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
2591      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
2592    }
2593
2594  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2595     masks will be all 1's.  We are guaranteed more than one transition.  */
2596  out[0] = GEN_INT (64 - shift);
2597  out[1] = GEN_INT (m1);
2598  out[2] = GEN_INT (shift);
2599  out[3] = GEN_INT (m2);
2600#else
2601  (void)in;
2602  (void)out;
2603  gcc_unreachable ();
2604#endif
2605}
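
/* Worked result (editorial addition) for the value traced in the comments
   above: for IN == 0x00fff000000fffff the outputs are

     out[0] = 20  (64 - shift)   out[1] = 0x000000ffffffffff  (m1)
     out[2] = 44  (shift)        out[3] = 0x00ffffffffffffff  (m2)

   i.e. rotate by out[0] and AND with m1 to clear the middle run of zeros,
   then rotate by out[2] (back to the original position) and AND with m2
   to clear the remaining run.  */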
2606
2607/* Return TRUE if OP is an invalid SUBREG operation on the e500.  */
2608
2609bool
2610invalid_e500_subreg (rtx op, enum machine_mode mode)
2611{
2612  if (TARGET_E500_DOUBLE)
2613    {
2614      /* Reject (subreg:SI (reg:DF)).  */
2615      if (GET_CODE (op) == SUBREG
2616	  && mode == SImode
2617	  && REG_P (SUBREG_REG (op))
2618	  && GET_MODE (SUBREG_REG (op)) == DFmode)
2619	return true;
2620
2621      /* Reject (subreg:DF (reg:DI)).  */
2622      if (GET_CODE (op) == SUBREG
2623	  && mode == DFmode
2624	  && REG_P (SUBREG_REG (op))
2625	  && GET_MODE (SUBREG_REG (op)) == DImode)
2626	return true;
2627    }
2628
2629  if (TARGET_SPE
2630      && GET_CODE (op) == SUBREG
2631      && mode == SImode
2632      && REG_P (SUBREG_REG (op))
2633      && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
2634    return true;
2635
2636  return false;
2637}
2638
2639/* On Darwin and AIX, natural record alignment is increased to doubleword if
2640   the first field is an FP double, while the FP fields remain word aligned.  */
2641
2642unsigned int
2643rs6000_special_round_type_align (tree type, unsigned int computed,
2644				 unsigned int specified)
2645{
2646  unsigned int align = MAX (computed, specified);
2647  tree field = TYPE_FIELDS (type);
2648
2649  /* Skip all non-field decls.  */
2650  while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2651    field = TREE_CHAIN (field);
2652
2653  if (field != NULL && field != type)
2654    {
2655      type = TREE_TYPE (field);
2656      while (TREE_CODE (type) == ARRAY_TYPE)
2657	type = TREE_TYPE (type);
2658
2659      if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2660	align = MAX (align, 64);
2661    }
2662
2663  return align;
2664}
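
/* A minimal sketch (editorial addition, hypothetical type) of a record
   affected by the rounding above: the leading double raises the record
   alignment to 64 bits, even though the FP fields themselves stay word
   aligned under power alignment rules.  */
#if 0
struct example_rounded
{
  double d;		/* first field has DFmode: alignment becomes 64 */
  int i;
};
#endif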
2665
2666/* Return 1 for an operand in small memory on V.4/eabi.  */
2667
2668int
2669small_data_operand (rtx op ATTRIBUTE_UNUSED,
2670		    enum machine_mode mode ATTRIBUTE_UNUSED)
2671{
2672#if TARGET_ELF
2673  rtx sym_ref;
2674
2675  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2676    return 0;
2677
2678  if (DEFAULT_ABI != ABI_V4)
2679    return 0;
2680
2681  if (GET_CODE (op) == SYMBOL_REF)
2682    sym_ref = op;
2683
2684  else if (GET_CODE (op) != CONST
2685	   || GET_CODE (XEXP (op, 0)) != PLUS
2686	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2687	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2688    return 0;
2689
2690  else
2691    {
2692      rtx sum = XEXP (op, 0);
2693      HOST_WIDE_INT summand;
2694
2695      /* We have to be careful here, because it is the referenced address
2696	 that must be 32k from _SDA_BASE_, not just the symbol.  */
2697      summand = INTVAL (XEXP (sum, 1));
2698      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2699	return 0;
2700
2701      sym_ref = XEXP (sum, 0);
2702    }
2703
2704  return SYMBOL_REF_SMALL_P (sym_ref);
2705#else
2706  return 0;
2707#endif
2708}
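
/* The two operand shapes (editorial note) accepted above on V.4/eabi,
   assuming "s" lives in small data and the addend keeps the referenced
   address within g_switch_value of _SDA_BASE_:

     (symbol_ref "s")
     (const (plus (symbol_ref "s") (const_int 4)))

   A negative addend, or one that pushes the referenced address out of
   range, is rejected.  */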
2709
2710/* Return true if either operand is a general purpose register.  */
2711
2712bool
2713gpr_or_gpr_p (rtx op0, rtx op1)
2714{
2715  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2716	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2717}
2718
2719
2720/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
2721
2722static int
2723constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2724{
2725  switch (GET_CODE (op))
2726    {
2727    case SYMBOL_REF:
2728      if (RS6000_SYMBOL_REF_TLS_P (op))
2729	return 0;
2730      else if (CONSTANT_POOL_ADDRESS_P (op))
2731	{
2732	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2733	    {
2734	      *have_sym = 1;
2735	      return 1;
2736	    }
2737	  else
2738	    return 0;
2739	}
2740      else if (! strcmp (XSTR (op, 0), toc_label_name))
2741	{
2742	  *have_toc = 1;
2743	  return 1;
2744	}
2745      else
2746	return 0;
2747    case PLUS:
2748    case MINUS:
2749      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2750	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2751    case CONST:
2752      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2753    case CONST_INT:
2754      return 1;
2755    default:
2756      return 0;
2757    }
2758}
2759
2760static bool
2761constant_pool_expr_p (rtx op)
2762{
2763  int have_sym = 0;
2764  int have_toc = 0;
2765  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2766}
2767
2768bool
2769toc_relative_expr_p (rtx op)
2770{
2771  int have_sym = 0;
2772  int have_toc = 0;
2773  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2774}
2775
2776bool
2777legitimate_constant_pool_address_p (rtx x)
2778{
2779  return (TARGET_TOC
2780	  && GET_CODE (x) == PLUS
2781	  && GET_CODE (XEXP (x, 0)) == REG
2782	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2783	  && constant_pool_expr_p (XEXP (x, 1)));
2784}
2785
2786static bool
2787legitimate_small_data_p (enum machine_mode mode, rtx x)
2788{
2789  return (DEFAULT_ABI == ABI_V4
2790	  && !flag_pic && !TARGET_TOC
2791	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2792	  && small_data_operand (x, mode));
2793}
2794
2795/* SPE offset addressing is limited to 5 bits' worth of doublewords: offsets that are multiples of 8 in the range 0..248.  */
2796#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2797
2798bool
2799rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2800{
2801  unsigned HOST_WIDE_INT offset, extra;
2802
2803  if (GET_CODE (x) != PLUS)
2804    return false;
2805  if (GET_CODE (XEXP (x, 0)) != REG)
2806    return false;
2807  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2808    return false;
2809  if (legitimate_constant_pool_address_p (x))
2810    return true;
2811  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2812    return false;
2813
2814  offset = INTVAL (XEXP (x, 1));
2815  extra = 0;
2816  switch (mode)
2817    {
2818    case V16QImode:
2819    case V8HImode:
2820    case V4SFmode:
2821    case V4SImode:
2822      /* AltiVec vector modes.  Only reg+reg addressing is valid and
2823	 constant offset zero should not occur due to canonicalization.
2824	 Allow any offset when not strict before reload.  */
2825      return !strict;
2826
2827    case V4HImode:
2828    case V2SImode:
2829    case V1DImode:
2830    case V2SFmode:
2831      /* SPE vector modes.  */
2832      return SPE_CONST_OFFSET_OK (offset);
2833
2834    case DFmode:
2835      if (TARGET_E500_DOUBLE)
2836	return SPE_CONST_OFFSET_OK (offset);
2837
2838    case DImode:
2839      /* On e500v2, we may have:
2840
2841	   (subreg:DF (mem:DI (plus (reg) (const_int))) 0),
2842
2843         which gets addressed with evldd instructions.  */
2844      if (TARGET_E500_DOUBLE)
2845	return SPE_CONST_OFFSET_OK (offset);
2846
2847      if (mode == DFmode || !TARGET_POWERPC64)
2848	extra = 4;
2849      else if (offset & 3)
2850	return false;
2851      break;
2852
2853    case TFmode:
2854    case TImode:
2855      if (mode == TFmode || !TARGET_POWERPC64)
2856	extra = 12;
2857      else if (offset & 3)
2858	return false;
2859      else
2860	extra = 8;
2861      break;
2862
2863    default:
2864      break;
2865    }
2866
2867  offset += 0x8000;
2868  return (offset < 0x10000) && (offset + extra < 0x10000);
2869}
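
/* Worked instance (editorial addition) of the final range test above, for
   DFmode (extra == 4): adding 0x8000 biases signed offsets into unsigned
   space, so the two comparisons accept offsets in [-0x8000, 0x7fff - 4].
   The "extra" slack guarantees the second word of the double is still
   reachable with a 16-bit displacement.  */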
2870
2871static bool
2872legitimate_indexed_address_p (rtx x, int strict)
2873{
2874  rtx op0, op1;
2875
2876  if (GET_CODE (x) != PLUS)
2877    return false;
2878
2879  op0 = XEXP (x, 0);
2880  op1 = XEXP (x, 1);
2881
2882  /* Recognize the rtl generated by reload which we know will later be
2883     replaced with proper base and index regs.  */
2884  if (!strict
2885      && reload_in_progress
2886      && (REG_P (op0) || GET_CODE (op0) == PLUS)
2887      && REG_P (op1))
2888    return true;
2889
2890  return (REG_P (op0) && REG_P (op1)
2891	  && ((INT_REG_OK_FOR_BASE_P (op0, strict)
2892	       && INT_REG_OK_FOR_INDEX_P (op1, strict))
2893	      || (INT_REG_OK_FOR_BASE_P (op1, strict)
2894		  && INT_REG_OK_FOR_INDEX_P (op0, strict))));
2895}
2896
2897inline bool
2898legitimate_indirect_address_p (rtx x, int strict)
2899{
2900  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2901}
2902
2903bool
2904macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2905{
2906  if (!TARGET_MACHO || !flag_pic
2907      || mode != SImode || GET_CODE (x) != MEM)
2908    return false;
2909  x = XEXP (x, 0);
2910
2911  if (GET_CODE (x) != LO_SUM)
2912    return false;
2913  if (GET_CODE (XEXP (x, 0)) != REG)
2914    return false;
2915  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2916    return false;
2917  x = XEXP (x, 1);
2918
2919  return CONSTANT_P (x);
2920}
2921
2922static bool
2923legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2924{
2925  if (GET_CODE (x) != LO_SUM)
2926    return false;
2927  if (GET_CODE (XEXP (x, 0)) != REG)
2928    return false;
2929  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2930    return false;
2931  /* Restrict addressing for DI and DF because of our SUBREG hackery.  */
2932  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2933    return false;
2934  x = XEXP (x, 1);
2935
2936  if (TARGET_ELF || TARGET_MACHO)
2937    {
2938      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2939	return false;
2940      if (TARGET_TOC)
2941	return false;
2942      if (GET_MODE_NUNITS (mode) != 1)
2943	return false;
2944      if (GET_MODE_BITSIZE (mode) > 64
2945	  || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2946	      && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2947	return false;
2948
2949      return CONSTANT_P (x);
2950    }
2951
2952  return false;
2953}
2954
2955
2956/* Try machine-dependent ways of modifying an illegitimate address
2957   to be legitimate.  If we find one, return the new, valid address.
2958   This is used from only one place: `memory_address' in explow.c.
2959
2960   OLDX is the address as it was before break_out_memory_refs was
2961   called.  In some cases it is useful to look at this to decide what
2962   needs to be done.
2963
2964   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2965
2966   It is always safe for this function to do nothing.  It exists to
2967   recognize opportunities to optimize the output.
2968
2969   On RS/6000, first check for the sum of a register with a constant
2970   integer that is out of range.  If so, generate code to add the
2971   constant with the low-order 16 bits masked to the register and force
2972   this result into another register (this can be done with `cau').
2973   Then generate an address of REG+(CONST&0xffff), allowing for the
2974   possibility of bit 16 being a one.
2975
2976   Then check for the sum of a register and something not constant, try to
2977   load the other things into a register and return the sum.  */
2978
2979rtx
2980rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2981			   enum machine_mode mode)
2982{
2983  if (GET_CODE (x) == SYMBOL_REF)
2984    {
2985      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2986      if (model != 0)
2987	return rs6000_legitimize_tls_address (x, model);
2988    }
2989
2990  if (GET_CODE (x) == PLUS
2991      && GET_CODE (XEXP (x, 0)) == REG
2992      && GET_CODE (XEXP (x, 1)) == CONST_INT
2993      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2994    {
2995      HOST_WIDE_INT high_int, low_int;
2996      rtx sum;
2997      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2998      high_int = INTVAL (XEXP (x, 1)) - low_int;
2999      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3000					 GEN_INT (high_int)), 0);
3001      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3002    }
3003  else if (GET_CODE (x) == PLUS
3004	   && GET_CODE (XEXP (x, 0)) == REG
3005	   && GET_CODE (XEXP (x, 1)) != CONST_INT
3006	   && GET_MODE_NUNITS (mode) == 1
3007	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3008	       || TARGET_POWERPC64
3009	       || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
3010		   && mode != TFmode))
3011	   && (TARGET_POWERPC64 || mode != DImode)
3012	   && mode != TImode)
3013    {
3014      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3015			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3016    }
3017  else if (ALTIVEC_VECTOR_MODE (mode))
3018    {
3019      rtx reg;
3020
3021      /* Make sure both operands are registers.  */
3022      if (GET_CODE (x) == PLUS)
3023	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3024			     force_reg (Pmode, XEXP (x, 1)));
3025
3026      reg = force_reg (Pmode, x);
3027      return reg;
3028    }
3029  else if (SPE_VECTOR_MODE (mode)
3030	   || (TARGET_E500_DOUBLE && (mode == DFmode
3031				      || mode == DImode)))
3032    {
3033      if (mode == DImode)
3034	return NULL_RTX;
3035      /* We accept [reg + reg] and [reg + OFFSET].  */
3036
3037      if (GET_CODE (x) == PLUS)
3038	{
3039	  rtx op1 = XEXP (x, 0);
3040	  rtx op2 = XEXP (x, 1);
3041
3042	  op1 = force_reg (Pmode, op1);
3043
3044	  if (GET_CODE (op2) != REG
3045	      && (GET_CODE (op2) != CONST_INT
3046		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3047	    op2 = force_reg (Pmode, op2);
3048
3049	  return gen_rtx_PLUS (Pmode, op1, op2);
3050	}
3051
3052      return force_reg (Pmode, x);
3053    }
3054  else if (TARGET_ELF
3055	   && TARGET_32BIT
3056	   && TARGET_NO_TOC
3057	   && ! flag_pic
3058	   && GET_CODE (x) != CONST_INT
3059	   && GET_CODE (x) != CONST_DOUBLE
3060	   && CONSTANT_P (x)
3061	   && GET_MODE_NUNITS (mode) == 1
3062	   && (GET_MODE_BITSIZE (mode) <= 32
3063	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
3064    {
3065      rtx reg = gen_reg_rtx (Pmode);
3066      emit_insn (gen_elf_high (reg, x));
3067      return gen_rtx_LO_SUM (Pmode, reg, x);
3068    }
3069  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3070	   && ! flag_pic
3071#if TARGET_MACHO
3072	   && ! MACHO_DYNAMIC_NO_PIC_P
3073#endif
3074	   && GET_CODE (x) != CONST_INT
3075	   && GET_CODE (x) != CONST_DOUBLE
3076	   && CONSTANT_P (x)
3077	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3078	   && mode != DImode
3079	   && mode != TImode)
3080    {
3081      rtx reg = gen_reg_rtx (Pmode);
3082      emit_insn (gen_macho_high (reg, x));
3083      return gen_rtx_LO_SUM (Pmode, reg, x);
3084    }
3085  else if (TARGET_TOC
3086	   && constant_pool_expr_p (x)
3087	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3088    {
3089      return create_TOC_reference (x);
3090    }
3091  else
3092    return NULL_RTX;
3093}
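
/* Worked instance (editorial addition, hypothetical values) of the
   high/low split above.  For x == (plus (reg R) (const_int 0x1abcd)):

     low_int  = ((0x1abcd & 0xffff) ^ 0x8000) - 0x8000 = -0x5433
     high_int = 0x1abcd - low_int                      = 0x20000

   so the address becomes (plus (plus R 0x20000) -0x5433): the inner sum
   is formed with an addis, and the low part fits the mem insn's signed
   16-bit displacement.  */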
3094
3095/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3096   We need to emit DTP-relative relocations.  */
3097
3098static void
3099rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3100{
3101  switch (size)
3102    {
3103    case 4:
3104      fputs ("\t.long\t", file);
3105      break;
3106    case 8:
3107      fputs (DOUBLE_INT_ASM_OP, file);
3108      break;
3109    default:
3110      gcc_unreachable ();
3111    }
3112  output_addr_const (file, x);
3113  fputs ("@dtprel+0x8000", file);
3114}
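
/* Sample output (editorial addition): for size == 4 and a SYMBOL_REF "x",
   the function above emits

	.long	x@dtprel+0x8000

   with output_addr_const supplying the bare symbol name.  */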
3115
3116/* Construct the SYMBOL_REF for the tls_get_addr function.  */
3117
3118static GTY(()) rtx rs6000_tls_symbol;
3119static rtx
3120rs6000_tls_get_addr (void)
3121{
3122  if (!rs6000_tls_symbol)
3123    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3124
3125  return rs6000_tls_symbol;
3126}
3127
3128/* Construct the SYMBOL_REF for TLS GOT references.  */
3129
3130static GTY(()) rtx rs6000_got_symbol;
3131static rtx
3132rs6000_got_sym (void)
3133{
3134  if (!rs6000_got_symbol)
3135    {
3136      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3137      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3138      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3139    }
3140
3141  return rs6000_got_symbol;
3142}
3143
3144/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
3145   this (thread-local) address.  */
3146
3147static rtx
3148rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3149{
3150  rtx dest, insn;
3151
3152  dest = gen_reg_rtx (Pmode);
3153  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3154    {
3155      rtx tlsreg;
3156
3157      if (TARGET_64BIT)
3158	{
3159	  tlsreg = gen_rtx_REG (Pmode, 13);
3160	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3161	}
3162      else
3163	{
3164	  tlsreg = gen_rtx_REG (Pmode, 2);
3165	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3166	}
3167      emit_insn (insn);
3168    }
3169  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3170    {
3171      rtx tlsreg, tmp;
3172
3173      tmp = gen_reg_rtx (Pmode);
3174      if (TARGET_64BIT)
3175	{
3176	  tlsreg = gen_rtx_REG (Pmode, 13);
3177	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3178	}
3179      else
3180	{
3181	  tlsreg = gen_rtx_REG (Pmode, 2);
3182	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3183	}
3184      emit_insn (insn);
3185      if (TARGET_64BIT)
3186	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3187      else
3188	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3189      emit_insn (insn);
3190    }
3191  else
3192    {
3193      rtx r3, got, tga, tmp1, tmp2, eqv;
3194
3195      /* We currently use relocations like @got@tlsgd for tls, which
3196	 means the linker will handle allocation of tls entries, placing
3197	 them in the .got section.  So use a pointer to the .got section,
3198	 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3199	 or to secondary GOT sections used by 32-bit -fPIC.  */
3200      if (TARGET_64BIT)
3201	got = gen_rtx_REG (Pmode, 2);
3202      else
3203	{
3204	  if (flag_pic == 1)
3205	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3206	  else
3207	    {
3208	      rtx gsym = rs6000_got_sym ();
3209	      got = gen_reg_rtx (Pmode);
3210	      if (flag_pic == 0)
3211		rs6000_emit_move (got, gsym, Pmode);
3212	      else
3213		{
3214		  rtx tempLR, tmp3, mem;
3215		  rtx first, last;
3216
3217		  tempLR = gen_reg_rtx (Pmode);
3218		  tmp1 = gen_reg_rtx (Pmode);
3219		  tmp2 = gen_reg_rtx (Pmode);
3220		  tmp3 = gen_reg_rtx (Pmode);
3221		  mem = gen_const_mem (Pmode, tmp1);
3222
3223		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
3224		  emit_move_insn (tmp1, tempLR);
3225		  emit_move_insn (tmp2, mem);
3226		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3227		  last = emit_move_insn (got, tmp3);
3228		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3229							REG_NOTES (last));
3230		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3231							 REG_NOTES (first));
3232		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3233							REG_NOTES (last));
3234		}
3235	    }
3236	}
3237
3238      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3239	{
3240	  r3 = gen_rtx_REG (Pmode, 3);
3241	  if (TARGET_64BIT)
3242	    insn = gen_tls_gd_64 (r3, got, addr);
3243	  else
3244	    insn = gen_tls_gd_32 (r3, got, addr);
3245	  start_sequence ();
3246	  emit_insn (insn);
3247	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3248	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3249	  insn = emit_call_insn (insn);
3250	  CONST_OR_PURE_CALL_P (insn) = 1;
3251	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3252	  insn = get_insns ();
3253	  end_sequence ();
3254	  emit_libcall_block (insn, dest, r3, addr);
3255	}
3256      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3257	{
3258	  r3 = gen_rtx_REG (Pmode, 3);
3259	  if (TARGET_64BIT)
3260	    insn = gen_tls_ld_64 (r3, got);
3261	  else
3262	    insn = gen_tls_ld_32 (r3, got);
3263	  start_sequence ();
3264	  emit_insn (insn);
3265	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3266	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3267	  insn = emit_call_insn (insn);
3268	  CONST_OR_PURE_CALL_P (insn) = 1;
3269	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3270	  insn = get_insns ();
3271	  end_sequence ();
3272	  tmp1 = gen_reg_rtx (Pmode);
3273	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3274				UNSPEC_TLSLD);
3275	  emit_libcall_block (insn, tmp1, r3, eqv);
3276	  if (rs6000_tls_size == 16)
3277	    {
3278	      if (TARGET_64BIT)
3279		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3280	      else
3281		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3282	    }
3283	  else if (rs6000_tls_size == 32)
3284	    {
3285	      tmp2 = gen_reg_rtx (Pmode);
3286	      if (TARGET_64BIT)
3287		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3288	      else
3289		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3290	      emit_insn (insn);
3291	      if (TARGET_64BIT)
3292		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3293	      else
3294		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3295	    }
3296	  else
3297	    {
3298	      tmp2 = gen_reg_rtx (Pmode);
3299	      if (TARGET_64BIT)
3300		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3301	      else
3302		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3303	      emit_insn (insn);
3304	      insn = gen_rtx_SET (Pmode, dest,
3305				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
3306	    }
3307	  emit_insn (insn);
3308	}
3309      else
3310	{
3311	  /* IE, or 64 bit offset LE.  */
3312	  tmp2 = gen_reg_rtx (Pmode);
3313	  if (TARGET_64BIT)
3314	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3315	  else
3316	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3317	  emit_insn (insn);
3318	  if (TARGET_64BIT)
3319	    insn = gen_tls_tls_64 (dest, tmp2, addr);
3320	  else
3321	    insn = gen_tls_tls_32 (dest, tmp2, addr);
3322	  emit_insn (insn);
3323	}
3324    }
3325
3326  return dest;
3327}
3328
3329/* Return 1 if X contains a thread-local symbol.  */
3330
3331bool
3332rs6000_tls_referenced_p (rtx x)
3333{
3334  if (! TARGET_HAVE_TLS)
3335    return false;
3336
3337  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3338}
3339
3340/* Return 1 if *X is a thread-local symbol.  This is the same as
3341   rs6000_tls_symbol_ref except for the type of the unused argument.  */
3342
3343static int
3344rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3345{
3346  return RS6000_SYMBOL_REF_TLS_P (*x);
3347}
3348
3349	/* The convention appears to be to define this wherever it is used.
3350	   Since legitimize_reload_address is now defined in this file,
3351	   REG_MODE_OK_FOR_BASE_P is used here as well.  */
3352#ifndef REG_MODE_OK_FOR_BASE_P
3353#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3354#endif
3355
3356/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
3357   replace the input X, or the original X if no replacement is called for.
3358   The output parameter *WIN is 1 if the calling macro should goto WIN,
3359   0 if it should not.
3360
3361	   For RS/6000, we wish to handle large displacements off a base
3362	   register by splitting the addend across an addis and the mem insn.
3363	   This cuts the number of extra insns needed from 3 to 1.
3364
3365   On Darwin, we use this to generate code for floating point constants.
3366   A movsf_low is generated so we wind up with 2 instructions rather than 3.
3367   The Darwin code is inside #if TARGET_MACHO because only then is
3368   machopic_function_base_name() defined.  */
3369rtx
3370rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3371				  int opnum, int type,
3372				  int ind_levels ATTRIBUTE_UNUSED, int *win)
3373{
3374  /* We must recognize output that we have already generated ourselves.  */
3375  if (GET_CODE (x) == PLUS
3376      && GET_CODE (XEXP (x, 0)) == PLUS
3377      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3378      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3379      && GET_CODE (XEXP (x, 1)) == CONST_INT)
3380    {
3381      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3382		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3383		   opnum, (enum reload_type)type);
3384      *win = 1;
3385      return x;
3386    }
3387
3388#if TARGET_MACHO
3389  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3390      && GET_CODE (x) == LO_SUM
3391      && GET_CODE (XEXP (x, 0)) == PLUS
3392      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3393      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3394      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3395      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3396      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3397      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3398      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3399    {
3400      /* Result of previous invocation of this function on Darwin
3401	 floating point constant.  */
3402      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3403		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3404		   opnum, (enum reload_type)type);
3405      *win = 1;
3406      return x;
3407    }
3408#endif
3409
3410  /* Force ld/std non-word aligned offset into base register by wrapping
3411     in offset 0.  */
3412  if (GET_CODE (x) == PLUS
3413      && GET_CODE (XEXP (x, 0)) == REG
3414      && REGNO (XEXP (x, 0)) < 32
3415      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3416      && GET_CODE (XEXP (x, 1)) == CONST_INT
3417      && (INTVAL (XEXP (x, 1)) & 3) != 0
3418      && !ALTIVEC_VECTOR_MODE (mode)
3419      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3420      && TARGET_POWERPC64)
3421    {
3422      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3423      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3424		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3425		   opnum, (enum reload_type) type);
3426      *win = 1;
3427      return x;
3428    }
3429
3430  if (GET_CODE (x) == PLUS
3431      && GET_CODE (XEXP (x, 0)) == REG
3432      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3433      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3434      && GET_CODE (XEXP (x, 1)) == CONST_INT
3435      && !SPE_VECTOR_MODE (mode)
3436      && !(TARGET_E500_DOUBLE && (mode == DFmode
3437				  || mode == DImode))
3438      && !ALTIVEC_VECTOR_MODE (mode))
3439    {
3440      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3441      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3442      HOST_WIDE_INT high
3443	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
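      /* A worked example of the split (values are illustrative only):
	 for val = 0x12345678 the low half sign-extends to low = 0x5678
	 and high = 0x12340000, while for val = 0x1234fff0 the low half
	 sign-extends to low = -0x10, so the carry is folded in and
	 high = 0x12350000.  In both cases high + low == val, so
	 splitting the addend across an addis and the mem insn
	 preserves the address.  */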
3444
3445      /* Check for 32-bit overflow.  */
3446      if (high + low != val)
3447	{
3448	  *win = 0;
3449	  return x;
3450	}
3451
3452      /* Reload the high part into a base reg; leave the low part
3453	 in the mem directly.  */
3454
3455      x = gen_rtx_PLUS (GET_MODE (x),
3456			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3457				      GEN_INT (high)),
3458			GEN_INT (low));
3459
3460      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3461		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3462		   opnum, (enum reload_type)type);
3463      *win = 1;
3464      return x;
3465    }
3466
3467  if (GET_CODE (x) == SYMBOL_REF
3468      && !ALTIVEC_VECTOR_MODE (mode)
3469      && !SPE_VECTOR_MODE (mode)
3470#if TARGET_MACHO
3471      && DEFAULT_ABI == ABI_DARWIN
3472      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3473#else
3474      && DEFAULT_ABI == ABI_V4
3475      && !flag_pic
3476#endif
3477      /* Don't do this for TFmode, since the result isn't offsettable.
3478	 The same goes for DImode without 64-bit gprs and DFmode
3479	 without fprs.  */
3480      && mode != TFmode
3481      && (mode != DImode || TARGET_POWERPC64)
3482      && (mode != DFmode || TARGET_POWERPC64
3483	  || (TARGET_FPRS && TARGET_HARD_FLOAT)))
3484    {
3485#if TARGET_MACHO
3486      if (flag_pic)
3487	{
3488	  rtx offset = gen_rtx_CONST (Pmode,
3489			 gen_rtx_MINUS (Pmode, x,
3490					machopic_function_base_sym ()));
3491	  x = gen_rtx_LO_SUM (GET_MODE (x),
3492		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3493		  gen_rtx_HIGH (Pmode, offset)), offset);
3494	}
3495      else
3496#endif
3497	x = gen_rtx_LO_SUM (GET_MODE (x),
3498	      gen_rtx_HIGH (Pmode, x), x);
3499
3500      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3501		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3502		   opnum, (enum reload_type)type);
3503      *win = 1;
3504      return x;
3505    }
3506
3507  /* Reload an offset address wrapped by an AND that represents the
3508     masking of the lower bits.  Strip the outer AND and let reload
3509     convert the offset address into an indirect address.  */
3510  if (TARGET_ALTIVEC
3511      && ALTIVEC_VECTOR_MODE (mode)
3512      && GET_CODE (x) == AND
3513      && GET_CODE (XEXP (x, 0)) == PLUS
3514      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3515      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3516      && GET_CODE (XEXP (x, 1)) == CONST_INT
3517      && INTVAL (XEXP (x, 1)) == -16)
3518    {
3519      x = XEXP (x, 0);
3520      *win = 1;
3521      return x;
3522    }
3523
3524  if (TARGET_TOC
3525      && constant_pool_expr_p (x)
3526      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3527    {
3528      x = create_TOC_reference (x);
3529      *win = 1;
3530      return x;
3531    }
3532  *win = 0;
3533  return x;
3534}
3535
3536/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3537   that is a valid memory address for an instruction.
3538   The MODE argument is the machine mode for the MEM expression
3539   that wants to use this address.
3540
3541	   On the RS/6000, there are four valid address forms: a SYMBOL_REF that
3542	   refers to a constant pool entry of an address (or the sum of it
3543	   plus a constant), a short (16-bit signed) constant plus a register,
3544	   the sum of two registers, or a register indirect, possibly with an
3545	   auto-increment.  For DFmode and DImode with a constant plus register,
3546	   we must ensure that both words are addressable, or that we are on
3547	   PowerPC64 with a word-aligned offset.
3548
3549   For modes spanning multiple registers (DFmode in 32-bit GPRs,
3550   32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3551   adjacent memory cells are accessed by adding word-sized offsets
3552   during assembly output.  */
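/* For illustration, the four forms look roughly like this as RTL
   (register numbers are arbitrary and this only sketches the shapes;
   the predicates used below give the exact rules):

     (symbol_ref "...")                     constant pool address
     (plus (reg 9) (const_int 8))           reg + 16-bit signed constant
     (plus (reg 9) (reg 10))                reg + reg (indexed)
     (reg 9), (pre_inc (reg 9))             reg indirect, w/ auto-inc  */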
3553int
3554rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3555{
3556  /* If this is an unaligned stvx/ldvx type address, discard the outer AND.  */
3557  if (TARGET_ALTIVEC
3558      && ALTIVEC_VECTOR_MODE (mode)
3559      && GET_CODE (x) == AND
3560      && GET_CODE (XEXP (x, 1)) == CONST_INT
3561      && INTVAL (XEXP (x, 1)) == -16)
3562    x = XEXP (x, 0);
3563
3564  if (RS6000_SYMBOL_REF_TLS_P (x))
3565    return 0;
3566  if (legitimate_indirect_address_p (x, reg_ok_strict))
3567    return 1;
3568  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3569      && !ALTIVEC_VECTOR_MODE (mode)
3570      && !SPE_VECTOR_MODE (mode)
3571      && mode != TFmode
3572      /* Restrict addressing for DI because of our SUBREG hackery.  */
3573      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3574      && TARGET_UPDATE
3575      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3576    return 1;
3577  if (legitimate_small_data_p (mode, x))
3578    return 1;
3579  if (legitimate_constant_pool_address_p (x))
3580    return 1;
3581	  /* If not REG_OK_STRICT (before reload), allow any stack offset.  */
3582  if (! reg_ok_strict
3583      && GET_CODE (x) == PLUS
3584      && GET_CODE (XEXP (x, 0)) == REG
3585      && (XEXP (x, 0) == virtual_stack_vars_rtx
3586	  || XEXP (x, 0) == arg_pointer_rtx)
3587      && GET_CODE (XEXP (x, 1)) == CONST_INT)
3588    return 1;
3589  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3590    return 1;
3591  if (mode != TImode
3592      && mode != TFmode
3593      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3594	  || TARGET_POWERPC64
3595	  || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3596      && (TARGET_POWERPC64 || mode != DImode)
3597      && legitimate_indexed_address_p (x, reg_ok_strict))
3598    return 1;
3599  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3600    return 1;
3601  return 0;
3602}
3603
3604/* Go to LABEL if ADDR (a legitimate address expression)
3605   has an effect that depends on the machine mode it is used for.
3606
3607	   On the RS/6000 this is true of all integral offsets (since AltiVec
3608	   modes don't allow them) and of any pre-increment or decrement.
3609
3610   ??? Except that due to conceptual problems in offsettable_address_p
3611   we can't really report the problems of integral offsets.  So leave
3612   this assuming that the adjustable offset must be valid for the
3613   sub-words of a TFmode operand, which is what we had before.  */
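/* For example (an informal sketch): a TFmode value is accessed as
   four words at offsets OFF, OFF+4, OFF+8 and OFF+12, so an offset is
   treated as mode-independent only when OFF + 12 still fits in a
   signed 16-bit displacement; that is what the
   val + 12 + 0x8000 >= 0x10000 test below checks.  */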
3614
3615bool
3616rs6000_mode_dependent_address (rtx addr)
3617{
3618  switch (GET_CODE (addr))
3619    {
3620    case PLUS:
3621      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3622	{
3623	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3624	  return val + 12 + 0x8000 >= 0x10000;
3625	}
3626      break;
3627
3628    case LO_SUM:
3629      return true;
3630
3631    case PRE_INC:
3632    case PRE_DEC:
3633      return TARGET_UPDATE;
3634
3635    default:
3636      break;
3637    }
3638
3639  return false;
3640}
3641
3642/* More elaborate version of recog's offsettable_memref_p predicate
3643   that works around the ??? note of rs6000_mode_dependent_address.
3644   In particular it accepts
3645
3646     (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3647
3648   in 32-bit mode, that the recog predicate rejects.  */
3649
3650bool
3651rs6000_offsettable_memref_p (rtx op)
3652{
3653  if (!MEM_P (op))
3654    return false;
3655
3656  /* First mimic offsettable_memref_p.  */
3657  if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3658    return true;
3659
3660  /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3661     the latter predicate knows nothing about the mode of the memory
3662     reference and, therefore, assumes that it is the largest supported
3663     mode (TFmode).  As a consequence, legitimate offsettable memory
3664     references are rejected.  rs6000_legitimate_offset_address_p contains
3665     the correct logic for the PLUS case of rs6000_mode_dependent_address.  */
3666  return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3667}
3668
3669/* Return number of consecutive hard regs needed starting at reg REGNO
3670   to hold something of mode MODE.
3671   This is ordinarily the length in words of a value of mode MODE
3672   but can be less for certain modes in special long registers.
3673
3674   For the SPE, GPRs are 64 bits but only 32 bits are visible in
3675   scalar instructions.  The upper 32 bits are only available to the
3676   SIMD instructions.
3677
3678   POWER and PowerPC GPRs hold 32 bits worth;
3679	   PowerPC64 GPRs and FPRs hold 64 bits worth.  */
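/* Some illustrative cases (assuming 32-bit words where relevant):
   DFmode needs one FPR but two GPRs; V4SImode needs one AltiVec
   register but four GPRs; DImode needs one GPR on PowerPC64 and two
   otherwise.  */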
3680
3681int
3682rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3683{
3684  if (FP_REGNO_P (regno))
3685    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3686
3687  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3688    return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3689
3690  if (ALTIVEC_REGNO_P (regno))
3691    return
3692      (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3693
3694  /* The value returned for SCmode in the E500 double case is 2 for
3695     ABI compatibility; storing an SCmode value in a single register
3696     would require function_arg and rs6000_spe_function_arg to handle
3697     SCmode so as to pass the value correctly in a pair of
3698     registers.  */
3699  if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
3700    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3701
3702  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3703}
3704
3705/* Change register usage conditional on target flags.  */
3706void
3707rs6000_conditional_register_usage (void)
3708{
3709  int i;
3710
3711  /* Set MQ register fixed (already call_used) if not POWER
3712     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3713     be allocated.  */
3714  if (! TARGET_POWER)
3715    fixed_regs[64] = 1;
3716
3717  /* 64-bit AIX and Linux reserve GPR13 for thread-private data.  */
3718  if (TARGET_64BIT)
3719    fixed_regs[13] = call_used_regs[13]
3720      = call_really_used_regs[13] = 1;
3721
3722  /* Conditionally disable FPRs.  */
3723  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3724    for (i = 32; i < 64; i++)
3725      fixed_regs[i] = call_used_regs[i]
3726	= call_really_used_regs[i] = 1;
3727
3728  /* The TOC register is not killed across calls in a way that is
3729     visible to the compiler.  */
3730  if (DEFAULT_ABI == ABI_AIX)
3731    call_really_used_regs[2] = 0;
3732
3733  if (DEFAULT_ABI == ABI_V4
3734      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3735      && flag_pic == 2)
3736    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3737
3738  if (DEFAULT_ABI == ABI_V4
3739      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3740      && flag_pic == 1)
3741    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3742      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3743      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3744
3745  if (DEFAULT_ABI == ABI_DARWIN
3746      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3747      fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3748      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3749      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3750
3751  if (TARGET_TOC && TARGET_MINIMAL_TOC)
3752    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3753      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3754
3755  if (TARGET_ALTIVEC)
3756    global_regs[VSCR_REGNO] = 1;
3757
3758  if (TARGET_SPE)
3759    {
3760      global_regs[SPEFSCR_REGNO] = 1;
3761      fixed_regs[FIXED_SCRATCH]
3762	= call_used_regs[FIXED_SCRATCH]
3763	= call_really_used_regs[FIXED_SCRATCH] = 1;
3764    }
3765
3766  if (! TARGET_ALTIVEC)
3767    {
3768      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3769	fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3770      call_really_used_regs[VRSAVE_REGNO] = 1;
3771    }
3772
3773  if (TARGET_ALTIVEC_ABI)
3774    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3775      call_used_regs[i] = call_really_used_regs[i] = 1;
3776}
3777
3778/* Try to output insns to set TARGET equal to the constant C if it can
3779   be done in less than N insns.  Do all computations in MODE.
3780   Returns the place where the output has been placed if it can be
3781   done and the insns have been emitted.  If it would take more than N
3782	   insns, zero is returned and no insns are emitted.  */
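/* For example, loading the SImode constant 0x12345678 takes two insns
   (rD here is just a stand-in for the destination register):

     lis  rD,0x1234		# set the high 16 bits
     ori  rD,rD,0x5678		# or in the low 16 bits

   which is how the SImode case below emits a SET of the high part
   followed by an IOR of the low part.  */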
3783
3784rtx
3785rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3786		       rtx source, int n ATTRIBUTE_UNUSED)
3787{
3788  rtx result, insn, set;
3789  HOST_WIDE_INT c0, c1;
3790
3791  switch (mode)
3792    {
3793	    case QImode:
3794    case HImode:
3795      if (dest == NULL)
3796	dest = gen_reg_rtx (mode);
3797      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3798      return dest;
3799
3800    case SImode:
3801      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3802
3803      emit_insn (gen_rtx_SET (VOIDmode, result,
3804			      GEN_INT (INTVAL (source)
3805				       & (~ (HOST_WIDE_INT) 0xffff))));
3806      emit_insn (gen_rtx_SET (VOIDmode, dest,
3807			      gen_rtx_IOR (SImode, result,
3808					   GEN_INT (INTVAL (source) & 0xffff))));
3809      result = dest;
3810      break;
3811
3812    case DImode:
3813      switch (GET_CODE (source))
3814	{
3815	case CONST_INT:
3816	  c0 = INTVAL (source);
3817	  c1 = -(c0 < 0);
3818	  break;
3819
3820	case CONST_DOUBLE:
3821#if HOST_BITS_PER_WIDE_INT >= 64
3822	  c0 = CONST_DOUBLE_LOW (source);
3823	  c1 = -(c0 < 0);
3824#else
3825	  c0 = CONST_DOUBLE_LOW (source);
3826	  c1 = CONST_DOUBLE_HIGH (source);
3827#endif
3828	  break;
3829
3830	default:
3831	  gcc_unreachable ();
3832	}
3833
3834      result = rs6000_emit_set_long_const (dest, c0, c1);
3835      break;
3836
3837    default:
3838      gcc_unreachable ();
3839    }
3840
3841  insn = get_last_insn ();
3842  set = single_set (insn);
3843  if (! CONSTANT_P (SET_SRC (set)))
3844    set_unique_reg_note (insn, REG_EQUAL, source);
3845
3846  return result;
3847}
3848
3849/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3850	   fall back to a straightforward decomposition.  We do this to avoid
3851   exponential run times encountered when looking for longer sequences
3852   with rs6000_emit_set_const.  */
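/* For example, a full 64-bit constant such as 0x123456789abcdef0
   decomposes into five insns (rD is a stand-in for the destination):

     lis   rD,0x1234		# ud4 << 16
     ori   rD,rD,0x5678		# | ud3
     sldi  rD,rD,32		# move into the high doubleword
     oris  rD,rD,0x9abc		# | ud2 << 16
     ori   rD,rD,0xdef0		# | ud1

   The earlier branches in the function handle the cheaper cases where
   the upper halfwords are all zeros or all ones.  */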
3853static rtx
3854rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3855{
3856  if (!TARGET_POWERPC64)
3857    {
3858      rtx operand1, operand2;
3859
3860      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3861					DImode);
3862      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3863					DImode);
3864      emit_move_insn (operand1, GEN_INT (c1));
3865      emit_move_insn (operand2, GEN_INT (c2));
3866    }
3867  else
3868    {
3869      HOST_WIDE_INT ud1, ud2, ud3, ud4;
3870
3871      ud1 = c1 & 0xffff;
3872      ud2 = (c1 & 0xffff0000) >> 16;
3873#if HOST_BITS_PER_WIDE_INT >= 64
3874      c2 = c1 >> 32;
3875#endif
3876      ud3 = c2 & 0xffff;
3877      ud4 = (c2 & 0xffff0000) >> 16;
3878
3879      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3880	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3881	{
3882	  if (ud1 & 0x8000)
3883	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
3884	  else
3885	    emit_move_insn (dest, GEN_INT (ud1));
3886	}
3887
3888      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3889	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3890	{
3891	  if (ud2 & 0x8000)
3892	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3893					   - 0x80000000));
3894	  else
3895	    emit_move_insn (dest, GEN_INT (ud2 << 16));
3896	  if (ud1 != 0)
3897	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3898	}
3899      else if ((ud4 == 0xffff && (ud3 & 0x8000))
3900	       || (ud4 == 0 && ! (ud3 & 0x8000)))
3901	{
3902	  if (ud3 & 0x8000)
3903	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3904					   - 0x80000000));
3905	  else
3906	    emit_move_insn (dest, GEN_INT (ud3 << 16));
3907
3908	  if (ud2 != 0)
3909	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3910	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3911	  if (ud1 != 0)
3912	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3913	}
3914      else
3915	{
3916	  if (ud4 & 0x8000)
3917	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3918					   - 0x80000000));
3919	  else
3920	    emit_move_insn (dest, GEN_INT (ud4 << 16));
3921
3922	  if (ud3 != 0)
3923	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3924
3925	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3926	  if (ud2 != 0)
3927	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3928					       GEN_INT (ud2 << 16)));
3929	  if (ud1 != 0)
3930	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3931	}
3932    }
3933  return dest;
3934}
3935
3936/* Helper for the following.  Get rid of [r+r] memory refs
3937   in cases where it won't work (TImode, TFmode).  */
3938
3939static void
3940rs6000_eliminate_indexed_memrefs (rtx operands[2])
3941{
3942  if (GET_CODE (operands[0]) == MEM
3943      && GET_CODE (XEXP (operands[0], 0)) != REG
3944      && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3945      && ! reload_in_progress)
3946    operands[0]
3947      = replace_equiv_address (operands[0],
3948			       copy_addr_to_reg (XEXP (operands[0], 0)));
3949
3950  if (GET_CODE (operands[1]) == MEM
3951      && GET_CODE (XEXP (operands[1], 0)) != REG
3952      && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3953      && ! reload_in_progress)
3954    operands[1]
3955      = replace_equiv_address (operands[1],
3956			       copy_addr_to_reg (XEXP (operands[1], 0)));
3957}
3958
3959/* Emit a move from SOURCE to DEST in mode MODE.  */
3960void
3961rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3962{
3963  rtx operands[2];
3964  operands[0] = dest;
3965  operands[1] = source;
3966
3967  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
3968  if (GET_CODE (operands[1]) == CONST_DOUBLE
3969      && ! FLOAT_MODE_P (mode)
3970      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3971    {
3972      /* FIXME.  This should never happen.  */
3973      /* Since it seems that it does, do the safe thing and convert
3974	 to a CONST_INT.  */
3975      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3976    }
3977  gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3978	      || FLOAT_MODE_P (mode)
3979	      || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3980		   || CONST_DOUBLE_LOW (operands[1]) < 0)
3981		  && (CONST_DOUBLE_HIGH (operands[1]) != -1
3982		      || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3983
3984  /* Check if GCC is setting up a block move that will end up using FP
3985     registers as temporaries.  We must make sure this is acceptable.  */
3986  if (GET_CODE (operands[0]) == MEM
3987      && GET_CODE (operands[1]) == MEM
3988      && mode == DImode
3989      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3990	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3991      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3992					    ? 32 : MEM_ALIGN (operands[0])))
3993	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3994					       ? 32
3995					       : MEM_ALIGN (operands[1]))))
3996      && ! MEM_VOLATILE_P (operands [0])
3997      && ! MEM_VOLATILE_P (operands [1]))
3998    {
3999      emit_move_insn (adjust_address (operands[0], SImode, 0),
4000		      adjust_address (operands[1], SImode, 0));
4001      emit_move_insn (adjust_address (operands[0], SImode, 4),
4002		      adjust_address (operands[1], SImode, 4));
4003      return;
4004    }
4005
4006  if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
4007      && !gpc_reg_operand (operands[1], mode))
4008    operands[1] = force_reg (mode, operands[1]);
4009
4010  if (mode == SFmode && ! TARGET_POWERPC
4011      && TARGET_HARD_FLOAT && TARGET_FPRS
4012      && GET_CODE (operands[0]) == MEM)
4013    {
4014      int regnum;
4015
4016      if (reload_in_progress || reload_completed)
4017	regnum = true_regnum (operands[1]);
4018      else if (GET_CODE (operands[1]) == REG)
4019	regnum = REGNO (operands[1]);
4020      else
4021	regnum = -1;
4022
4023      /* If operands[1] is a register, on POWER it may have
4024	 double-precision data in it, so truncate it to single
4025	 precision.  */
4026      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4027	{
4028	  rtx newreg;
4029	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4030	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4031	  operands[1] = newreg;
4032	}
4033    }
4034
4035  /* Recognize the case where operand[1] is a reference to thread-local
4036     data and load its address to a register.  */
4037  if (rs6000_tls_referenced_p (operands[1]))
4038    {
4039      enum tls_model model;
4040      rtx tmp = operands[1];
4041      rtx addend = NULL;
4042
4043      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4044	{
4045	  addend = XEXP (XEXP (tmp, 0), 1);
4046	  tmp = XEXP (XEXP (tmp, 0), 0);
4047	}
4048
4049      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4050      model = SYMBOL_REF_TLS_MODEL (tmp);
4051      gcc_assert (model != 0);
4052
4053      tmp = rs6000_legitimize_tls_address (tmp, model);
4054      if (addend)
4055	{
4056	  tmp = gen_rtx_PLUS (mode, tmp, addend);
4057	  tmp = force_operand (tmp, operands[0]);
4058	}
4059      operands[1] = tmp;
4060    }
4061
4062  /* Handle the case where reload calls us with an invalid address.  */
4063  if (reload_in_progress && mode == Pmode
4064      && (! general_operand (operands[1], mode)
4065	  || ! nonimmediate_operand (operands[0], mode)))
4066    goto emit_set;
4067
4068  /* 128-bit constant floating-point values on Darwin should really be
4069     loaded as two parts.  */
4070  if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
4071      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4072    {
4073      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4074	 know how to get a DFmode SUBREG of a TFmode.  */
4075      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4076			simplify_gen_subreg (DImode, operands[1], mode, 0),
4077			DImode);
4078      rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4079					     GET_MODE_SIZE (DImode)),
4080			simplify_gen_subreg (DImode, operands[1], mode,
4081					     GET_MODE_SIZE (DImode)),
4082			DImode);
4083      return;
4084    }
4085
4086  /* FIXME:  In the long term, this switch statement should go away
4087     and be replaced by a sequence of tests based on things like
4088     mode == Pmode.  */
4089  switch (mode)
4090    {
4091    case HImode:
4092    case QImode:
4093      if (CONSTANT_P (operands[1])
4094	  && GET_CODE (operands[1]) != CONST_INT)
4095	operands[1] = force_const_mem (mode, operands[1]);
4096      break;
4097
4098    case TFmode:
4099      rs6000_eliminate_indexed_memrefs (operands);
4100      /* fall through */
4101
4102    case DFmode:
4103    case SFmode:
4104      if (CONSTANT_P (operands[1])
4105	  && ! easy_fp_constant (operands[1], mode))
4106	operands[1] = force_const_mem (mode, operands[1]);
4107      break;
4108
4109    case V16QImode:
4110    case V8HImode:
4111    case V4SFmode:
4112    case V4SImode:
4113    case V4HImode:
4114    case V2SFmode:
4115    case V2SImode:
4116    case V1DImode:
4117      if (CONSTANT_P (operands[1])
4118	  && !easy_vector_constant (operands[1], mode))
4119	operands[1] = force_const_mem (mode, operands[1]);
4120      break;
4121
4122    case SImode:
4123    case DImode:
4124	      /* Use default pattern for address of ELF small data.  */
4125      if (TARGET_ELF
4126	  && mode == Pmode
4127	  && DEFAULT_ABI == ABI_V4
4128	  && (GET_CODE (operands[1]) == SYMBOL_REF
4129	      || GET_CODE (operands[1]) == CONST)
4130	  && small_data_operand (operands[1], mode))
4131	{
4132	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4133	  return;
4134	}
4135
4136      if (DEFAULT_ABI == ABI_V4
4137	  && mode == Pmode && mode == SImode
4138	  && flag_pic == 1 && got_operand (operands[1], mode))
4139	{
4140	  emit_insn (gen_movsi_got (operands[0], operands[1]));
4141	  return;
4142	}
4143
4144      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4145	  && TARGET_NO_TOC
4146	  && ! flag_pic
4147	  && mode == Pmode
4148	  && CONSTANT_P (operands[1])
4149	  && GET_CODE (operands[1]) != HIGH
4150	  && GET_CODE (operands[1]) != CONST_INT)
4151	{
4152	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4153
4154	  /* If this is a function address on -mcall-aixdesc,
4155	     convert it to the address of the descriptor.  */
4156	  if (DEFAULT_ABI == ABI_AIX
4157	      && GET_CODE (operands[1]) == SYMBOL_REF
4158	      && XSTR (operands[1], 0)[0] == '.')
4159	    {
4160	      const char *name = XSTR (operands[1], 0);
4161	      rtx new_ref;
4162	      while (*name == '.')
4163		name++;
4164	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4165	      CONSTANT_POOL_ADDRESS_P (new_ref)
4166		= CONSTANT_POOL_ADDRESS_P (operands[1]);
4167	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4168	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4169	      SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
4170	      operands[1] = new_ref;
4171	    }
4172
4173	  if (DEFAULT_ABI == ABI_DARWIN)
4174	    {
4175#if TARGET_MACHO
4176	      if (MACHO_DYNAMIC_NO_PIC_P)
4177		{
4178		  /* Take care of any required data indirection.  */
4179		  operands[1] = rs6000_machopic_legitimize_pic_address (
4180				  operands[1], mode, operands[0]);
4181		  if (operands[0] != operands[1])
4182		    emit_insn (gen_rtx_SET (VOIDmode,
4183					    operands[0], operands[1]));
4184		  return;
4185		}
4186#endif
4187	      emit_insn (gen_macho_high (target, operands[1]));
4188	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
4189	      return;
4190	    }
4191
4192	  emit_insn (gen_elf_high (target, operands[1]));
4193	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
4194	  return;
4195	}
4196
4197      /* If this is a SYMBOL_REF that refers to a constant pool entry,
4198	 and we have put it in the TOC, we just need to make a TOC-relative
4199	 reference to it.  */
4200      if (TARGET_TOC
4201	  && GET_CODE (operands[1]) == SYMBOL_REF
4202	  && constant_pool_expr_p (operands[1])
4203	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4204					      get_pool_mode (operands[1])))
4205	{
4206	  operands[1] = create_TOC_reference (operands[1]);
4207	}
4208      else if (mode == Pmode
4209	       && CONSTANT_P (operands[1])
4210	       && ((GET_CODE (operands[1]) != CONST_INT
4211		    && ! easy_fp_constant (operands[1], mode))
4212		   || (GET_CODE (operands[1]) == CONST_INT
4213		       && num_insns_constant (operands[1], mode) > 2)
4214		   || (GET_CODE (operands[0]) == REG
4215		       && FP_REGNO_P (REGNO (operands[0]))))
4216	       && GET_CODE (operands[1]) != HIGH
4217	       && ! legitimate_constant_pool_address_p (operands[1])
4218	       && ! toc_relative_expr_p (operands[1]))
4219	{
4220	  /* Emit a USE operation so that the constant isn't deleted if
4221	     expensive optimizations are turned on because nobody
4222	     references it.  This should only be done for operands that
4223	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4224	     This should not be done for operands that contain LABEL_REFs.
4225	     For now, we just handle the obvious case.  */
4226	  if (GET_CODE (operands[1]) != LABEL_REF)
4227	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4228
4229#if TARGET_MACHO
4230	  /* Darwin uses a special PIC legitimizer.  */
4231	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4232	    {
4233	      operands[1] =
4234		rs6000_machopic_legitimize_pic_address (operands[1], mode,
4235							operands[0]);
4236	      if (operands[0] != operands[1])
4237		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4238	      return;
4239	    }
4240#endif
4241
4242	  /* If we are to limit the number of things we put in the TOC and
4243	     this is a symbol plus a constant we can add in one insn,
4244	     just put the symbol in the TOC and add the constant.  Don't do
4245	     this if reload is in progress.  */
4246	  if (GET_CODE (operands[1]) == CONST
4247	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4248	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
4249	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4250	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4251		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4252	      && ! side_effects_p (operands[0]))
4253	    {
4254	      rtx sym =
4255		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4256	      rtx other = XEXP (XEXP (operands[1], 0), 1);
4257
4258	      sym = force_reg (mode, sym);
4259	      if (mode == SImode)
4260		emit_insn (gen_addsi3 (operands[0], sym, other));
4261	      else
4262		emit_insn (gen_adddi3 (operands[0], sym, other));
4263	      return;
4264	    }
4265
4266	  operands[1] = force_const_mem (mode, operands[1]);
4267
4268	  if (TARGET_TOC
4269	      && constant_pool_expr_p (XEXP (operands[1], 0))
4270	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4271			get_pool_constant (XEXP (operands[1], 0)),
4272			get_pool_mode (XEXP (operands[1], 0))))
4273	    {
4274	      operands[1]
4275		= gen_const_mem (mode,
4276				 create_TOC_reference (XEXP (operands[1], 0)));
4277	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
4278	    }
4279	}
4280      break;
4281
4282    case TImode:
4283      rs6000_eliminate_indexed_memrefs (operands);
4284
4285      if (TARGET_POWER)
4286	{
4287	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
4288		       gen_rtvec (2,
4289				  gen_rtx_SET (VOIDmode,
4290					       operands[0], operands[1]),
4291				  gen_rtx_CLOBBER (VOIDmode,
4292						   gen_rtx_SCRATCH (SImode)))));
4293	  return;
4294	}
4295      break;
4296
4297    default:
4298      gcc_unreachable ();
4299    }
4300
4301  /* Above, we may have called force_const_mem which may have returned
4302     an invalid address.  If we can, fix this up; otherwise, reload will
4303     have to deal with it.  */
4304  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4305    operands[1] = validize_mem (operands[1]);
4306
4307 emit_set:
4308  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4309}
4310
4311/* Nonzero if we can use a floating-point register to pass this arg.  */
4312#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
4313  (SCALAR_FLOAT_MODE_P (MODE)			\
4314   && !DECIMAL_FLOAT_MODE_P (MODE)		\
4315   && (CUM)->fregno <= FP_ARG_MAX_REG		\
4316   && TARGET_HARD_FLOAT && TARGET_FPRS)
4317
4318/* Nonzero if we can use an AltiVec register to pass this arg.  */
4319#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
4320  (ALTIVEC_VECTOR_MODE (MODE)				\
4321   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
4322   && TARGET_ALTIVEC_ABI				\
4323   && (NAMED))
4324
4325/* Return a nonzero value to say to return the function value in
4326   memory, just as large structures are always returned.  TYPE will be
4327   the data type of the value, and FNTYPE will be the type of the
4328   function doing the returning, or @code{NULL} for libcalls.
4329
4330   The AIX ABI for the RS/6000 specifies that all structures are
4331   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
4332   specifies that structures <= 8 bytes are returned in r3/r4, but a
4333   draft put them in memory, and GCC used to implement the draft
4334   instead of the final standard.  Therefore, aix_struct_return
4335   controls this instead of DEFAULT_ABI; V.4 targets needing backward
4336   compatibility can change DRAFT_V4_STRUCT_RET to override the
4337   default, and -m switches get the final word.  See
4338   rs6000_override_options for more details.
4339
4340   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4341   long double support is enabled.  These values are returned in memory.
4342
4343   int_size_in_bytes returns -1 for variable size objects, which go in
4344   memory always.  The cast to unsigned makes -1 > 8.  */
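/* For example, with a hypothetical 8-byte type

     struct pair { int x; int y; };

   a function returning struct pair uses memory under the AIX and
   Darwin ABIs, while the final SVR4 standard returns it in r3/r4
   (subject to aix_struct_return as described above).  */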
4345
4346static bool
4347rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4348{
4349  /* In the darwin64 abi, try to use registers for larger structs
4350     if possible.  */
4351  if (rs6000_darwin64_abi
4352      && TREE_CODE (type) == RECORD_TYPE
4353      && int_size_in_bytes (type) > 0)
4354    {
4355      CUMULATIVE_ARGS valcum;
4356      rtx valret;
4357
4358      valcum.words = 0;
4359      valcum.fregno = FP_ARG_MIN_REG;
4360      valcum.vregno = ALTIVEC_ARG_MIN_REG;
4361      /* Do a trial code generation as if this were going to be passed
4362	 as an argument; if any part goes in memory, we return NULL.  */
4363      valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4364      if (valret)
4365	return false;
4366      /* Otherwise fall through to more conventional ABI rules.  */
4367    }
4368
4369  if (AGGREGATE_TYPE_P (type)
4370      && (aix_struct_return
4371	  || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4372    return true;
4373
4374  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
4375     modes only exist for GCC vector types if -maltivec.  */
4376  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4377      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4378    return false;
4379
4380  /* Return synthetic vectors in memory.  */
4381  if (TREE_CODE (type) == VECTOR_TYPE
4382      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4383    {
4384      static bool warned_for_return_big_vectors = false;
4385      if (!warned_for_return_big_vectors)
4386	{
4387	  warning (0, "GCC vector returned by reference: "
4388		   "non-standard ABI extension with no compatibility guarantee");
4389	  warned_for_return_big_vectors = true;
4390	}
4391      return true;
4392    }
4393
4394  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
4395    return true;
4396
4397  return false;
4398}
4399
4400/* Initialize a variable CUM of type CUMULATIVE_ARGS
4401   for a call to a function whose data type is FNTYPE.
4402   For a library call, FNTYPE is 0.
4403
4404	   For incoming args we set the number of arguments in the prototype
4405	   to a large value so we never return a PARALLEL.  */
4406
4407void
4408init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4409		      rtx libname ATTRIBUTE_UNUSED, int incoming,
4410		      int libcall, int n_named_args)
4411{
4412  static CUMULATIVE_ARGS zero_cumulative;
4413
4414  *cum = zero_cumulative;
4415  cum->words = 0;
4416  cum->fregno = FP_ARG_MIN_REG;
4417  cum->vregno = ALTIVEC_ARG_MIN_REG;
4418  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4419  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4420		      ? CALL_LIBCALL : CALL_NORMAL);
4421  cum->sysv_gregno = GP_ARG_MIN_REG;
4422  cum->stdarg = fntype
4423    && (TYPE_ARG_TYPES (fntype) != 0
4424	&& (TREE_VALUE (tree_last  (TYPE_ARG_TYPES (fntype)))
4425	    != void_type_node));
4426
4427  cum->nargs_prototype = 0;
4428  if (incoming || cum->prototype)
4429    cum->nargs_prototype = n_named_args;
4430
4431  /* Check for a longcall attribute.  */
4432  if ((!fntype && rs6000_default_long_calls)
4433      || (fntype
4434	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4435	  && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4436    cum->call_cookie |= CALL_LONG;
4437
4438  if (TARGET_DEBUG_ARG)
4439    {
4440      fprintf (stderr, "\ninit_cumulative_args:");
4441      if (fntype)
4442	{
4443	  tree ret_type = TREE_TYPE (fntype);
4444	  fprintf (stderr, " ret code = %s,",
4445		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
4446	}
4447
4448      if (cum->call_cookie & CALL_LONG)
4449	fprintf (stderr, " longcall,");
4450
4451      fprintf (stderr, " proto = %d, nargs = %d\n",
4452	       cum->prototype, cum->nargs_prototype);
4453    }
4454
4455  if (fntype
4456      && !TARGET_ALTIVEC
4457      && TARGET_ALTIVEC_ABI
4458      && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4459    {
4460      error ("cannot return value in vector register because"
4461	     " altivec instructions are disabled, use -maltivec"
4462	     " to enable them");
4463    }
4464}
4465
4466/* Return true if TYPE must be passed on the stack and not in registers.  */
4467
4468static bool
4469rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4470{
4471  if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4472    return must_pass_in_stack_var_size (mode, type);
4473  else
4474    return must_pass_in_stack_var_size_or_pad (mode, type);
4475}
4476
4477/* If defined, a C expression which determines whether, and in which
4478   direction, to pad out an argument with extra space.  The value
4479   should be of type `enum direction': either `upward' to pad above
4480   the argument, `downward' to pad below, or `none' to inhibit
4481   padding.
4482
4483   For the AIX ABI structs are always stored left shifted in their
4484   argument slot.  */
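/* For example, with the default (!AGGREGATE_PADDING_FIXED) rules on a
   big-endian target, a struct of size 1, 2 or 4 bytes is padded
   downward like an integer of that size, while a 3-byte struct is
   padded upward.  */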
4485
4486enum direction
4487function_arg_padding (enum machine_mode mode, tree type)
4488{
4489#ifndef AGGREGATE_PADDING_FIXED
4490#define AGGREGATE_PADDING_FIXED 0
4491#endif
4492#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4493#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4494#endif
4495
4496  if (!AGGREGATE_PADDING_FIXED)
4497    {
4498      /* GCC used to pass structures of the same size as integer types as
4499	 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4500	 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4501	 passed padded downward, except that -mstrict-align further
4502	 muddied the water in that multi-component structures of 2 and 4
4503	 bytes in size were passed padded upward.
4504
4505	 The following arranges for best compatibility with previous
4506	 versions of gcc, but removes the -mstrict-align dependency.  */
4507      if (BYTES_BIG_ENDIAN)
4508	{
4509	  HOST_WIDE_INT size = 0;
4510
4511	  if (mode == BLKmode)
4512	    {
4513	      if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4514		size = int_size_in_bytes (type);
4515	    }
4516	  else
4517	    size = GET_MODE_SIZE (mode);
4518
4519	  if (size == 1 || size == 2 || size == 4)
4520	    return downward;
4521	}
4522      return upward;
4523    }
4524
4525  if (AGGREGATES_PAD_UPWARD_ALWAYS)
4526    {
4527      if (type != 0 && AGGREGATE_TYPE_P (type))
4528	return upward;
4529    }
4530
4531  /* Fall back to the default.  */
4532  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4533}
4534
4535/* If defined, a C expression that gives the alignment boundary, in bits,
4536   of an argument with the specified mode and type.  If it is not defined,
4537   PARM_BOUNDARY is used for all arguments.
4538
4539   V.4 wants long longs and doubles to be double word aligned.  Just
4540   testing the mode size is a boneheaded way to do this as it means
4541   that other types such as complex int are also double word aligned.
4542   However, we're stuck with this because changing the ABI might break
4543   existing library interfaces.
4544
4545   Doubleword align SPE vectors.
4546   Quadword align Altivec vectors.
4547	   Quadword align large synthetic vector types.  */
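/* For example, under the V.4 ABI a long long or double argument is
   aligned to 64 bits and an AltiVec vector to 128 bits, while
   everything else falls back to PARM_BOUNDARY.  */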
4548
4549int
4550function_arg_boundary (enum machine_mode mode, tree type)
4551{
4552  if (DEFAULT_ABI == ABI_V4
4553      && (GET_MODE_SIZE (mode) == 8
4554	  || (TARGET_HARD_FLOAT
4555	      && TARGET_FPRS
4556	      && mode == TFmode)))
4557    return 64;
4558  else if (SPE_VECTOR_MODE (mode)
4559	   || (type && TREE_CODE (type) == VECTOR_TYPE
4560	       && int_size_in_bytes (type) >= 8
4561	       && int_size_in_bytes (type) < 16))
4562    return 64;
4563  else if (ALTIVEC_VECTOR_MODE (mode)
4564	   || (type && TREE_CODE (type) == VECTOR_TYPE
4565	       && int_size_in_bytes (type) >= 16))
4566    return 128;
4567  else if (rs6000_darwin64_abi && mode == BLKmode
4568	   && type && TYPE_ALIGN (type) > 64)
4569    return 128;
4570  else
4571    return PARM_BOUNDARY;
4572}
4573
4574/* For a function parm of MODE and TYPE, return the starting word in
4575   the parameter area.  NWORDS of the parameter area are already used.  */
4576
4577static unsigned int
4578rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4579{
4580  unsigned int align;
4581  unsigned int parm_offset;
4582
4583  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4584  parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4585  return nwords + (-(parm_offset + nwords) & align);
4586}
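/* A worked example of the formula above (under the 32-bit AIX ABI,
   where the parameter save area starts 24 bytes into the frame, so
   parm_offset == 6): a 16-byte-aligned vector argument has
   align == 128 / 32 - 1 == 3, and with no words used yet it starts at
   word 0 + (-(6 + 0) & 3) == 2, i.e. byte offset 24 + 8 == 32, which
   is 16-byte aligned.  */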
4587
4588/* Compute the size (in words) of a function argument.  */
4589
4590static unsigned long
4591rs6000_arg_size (enum machine_mode mode, tree type)
4592{
4593  unsigned long size;
4594
4595  if (mode != BLKmode)
4596    size = GET_MODE_SIZE (mode);
4597  else
4598    size = int_size_in_bytes (type);
4599
4600  if (TARGET_32BIT)
4601    return (size + 3) >> 2;
4602  else
4603    return (size + 7) >> 3;
4604}
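/* For example, a 10-byte BLKmode argument occupies (10 + 3) >> 2 == 3
   words when TARGET_32BIT and (10 + 7) >> 3 == 2 doublewords
   otherwise.  */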
4605
4606/* Use this to flush pending int fields.  */
4607
4608static void
4609rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4610					  HOST_WIDE_INT bitpos)
4611{
4612  unsigned int startbit, endbit;
4613  int intregs, intoffset;
4614  enum machine_mode mode;
4615
4616  if (cum->intoffset == -1)
4617    return;
4618
4619  intoffset = cum->intoffset;
4620  cum->intoffset = -1;
4621
4622  if (intoffset % BITS_PER_WORD != 0)
4623    {
4624      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4625			    MODE_INT, 0);
4626      if (mode == BLKmode)
4627	{
4628	  /* We couldn't find an appropriate mode, which happens,
4629	     e.g., in packed structs when there are 3 bytes to load.
4630	     Move intoffset back to the beginning of the word in this
4631	     case.  */
4632	  intoffset = intoffset & -BITS_PER_WORD;
4633	}
4634    }
4635
4636  startbit = intoffset & -BITS_PER_WORD;
4637  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4638  intregs = (endbit - startbit) / BITS_PER_WORD;
4639  cum->words += intregs;
4640}
4641
4642/* The darwin64 ABI calls for us to recurse down through structs,
4643   looking for elements passed in registers.  Unfortunately, we have
4644   to track int register count here also because of misalignments
4645   in powerpc alignment mode.  */
4646
4647static void
4648rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4649					    tree type,
4650					    HOST_WIDE_INT startbitpos)
4651{
4652  tree f;
4653
4654  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4655    if (TREE_CODE (f) == FIELD_DECL)
4656      {
4657	HOST_WIDE_INT bitpos = startbitpos;
4658	tree ftype = TREE_TYPE (f);
4659	enum machine_mode mode;
4660	if (ftype == error_mark_node)
4661	  continue;
4662	mode = TYPE_MODE (ftype);
4663
4664	if (DECL_SIZE (f) != 0
4665	    && host_integerp (bit_position (f), 1))
4666	  bitpos += int_bit_position (f);
4667
4668	/* ??? FIXME: else assume zero offset.  */
4669
4670	if (TREE_CODE (ftype) == RECORD_TYPE)
4671	  rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4672	else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4673	  {
4674	    rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4675	    cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4676	    cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4677	  }
4678	else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4679	  {
4680	    rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4681	    cum->vregno++;
4682	    cum->words += 2;
4683	  }
4684	else if (cum->intoffset == -1)
4685	  cum->intoffset = bitpos;
4686      }
4687}
4688
4689/* Update the data in CUM to advance over an argument
4690   of mode MODE and data type TYPE.
4691   (TYPE is null for libcalls where that information may not be available.)
4692
4693   Note that for args passed by reference, function_arg will be called
4694   with MODE and TYPE set to that of the pointer to the arg, not the arg
4695   itself.  */
4696
4697void
4698function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4699		      tree type, int named, int depth)
4700{
4701  int size;
4702
4703  /* Only tick off an argument if we're not recursing.  */
4704  if (depth == 0)
4705    cum->nargs_prototype--;
4706
4707  if (TARGET_ALTIVEC_ABI
4708      && (ALTIVEC_VECTOR_MODE (mode)
4709	  || (type && TREE_CODE (type) == VECTOR_TYPE
4710	      && int_size_in_bytes (type) == 16)))
4711    {
4712      bool stack = false;
4713
4714      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4715	{
4716	  cum->vregno++;
4717	  if (!TARGET_ALTIVEC)
4718	    error ("cannot pass argument in vector register because"
4719		   " altivec instructions are disabled, use -maltivec"
4720		   " to enable them");
4721
4722	  /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4723	     even if it is going to be passed in a vector register.
4724	     Darwin does the same for variable-argument functions.  */
4725	  if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4726	      || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4727	    stack = true;
4728	}
4729      else
4730	stack = true;
4731
4732      if (stack)
4733	{
4734	  int align;
4735
4736	  /* Vector parameters must be 16-byte aligned.  This places
4737	     them at 2 mod 4 in terms of words in 32-bit mode, since
4738	     the parameter save area starts at offset 24 from the
4739	     stack.  In 64-bit mode, they just have to start on an
4740	     even word, since the parameter save area is 16-byte
4741	     aligned.  Space for GPRs is reserved even if the argument
4742	     will be passed in memory.  */
4743	  if (TARGET_32BIT)
4744	    align = (2 - cum->words) & 3;
4745	  else
4746	    align = cum->words & 1;
4747	  cum->words += align + rs6000_arg_size (mode, type);
4748
4749	  if (TARGET_DEBUG_ARG)
4750	    {
4751	      fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4752		       cum->words, align);
4753	      fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4754		       cum->nargs_prototype, cum->prototype,
4755		       GET_MODE_NAME (mode));
4756	    }
4757	}
4758    }
4759  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4760	   && !cum->stdarg
4761	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
4762    cum->sysv_gregno++;
4763
4764  else if (rs6000_darwin64_abi
4765	   && mode == BLKmode
4766	   && TREE_CODE (type) == RECORD_TYPE
4767	   && (size = int_size_in_bytes (type)) > 0)
4768    {
4769      /* Variable sized types have size == -1 and are
4770	 treated as if consisting entirely of ints.
4771	 Pad to 16 byte boundary if needed.  */
4772      if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4773	  && (cum->words % 2) != 0)
4774	cum->words++;
4775      /* For varargs, we can just go up by the size of the struct. */
4776      if (!named)
4777	cum->words += (size + 7) / 8;
4778      else
4779	{
4780	  /* It is tempting to say int register count just goes up by
4781	     sizeof(type)/8, but this is wrong in a case such as
4782	     { int; double; int; } [powerpc alignment].  We have to
4783	     grovel through the fields for these too.  */
4784	  cum->intoffset = 0;
4785	  rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
4786	  rs6000_darwin64_record_arg_advance_flush (cum,
4787						    size * BITS_PER_UNIT);
4788	}
4789    }
4790  else if (DEFAULT_ABI == ABI_V4)
4791    {
4792      if (TARGET_HARD_FLOAT && TARGET_FPRS
4793	  && (mode == SFmode || mode == DFmode
4794	      || (mode == TFmode && !TARGET_IEEEQUAD)))
4795	{
4796	  if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
4797	    cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4798	  else
4799	    {
4800	      cum->fregno = FP_ARG_V4_MAX_REG + 1;
4801	      if (mode == DFmode || mode == TFmode)
4802		cum->words += cum->words & 1;
4803	      cum->words += rs6000_arg_size (mode, type);
4804	    }
4805	}
4806      else
4807	{
4808	  int n_words = rs6000_arg_size (mode, type);
4809	  int gregno = cum->sysv_gregno;
4810
4811	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4812	     (r7,r8) or (r9,r10), as is any other 2-word item such
4813	     as complex int, due to a historical mistake.  */
4814	  if (n_words == 2)
4815	    gregno += (1 - gregno) & 1;
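	  /* For example, gregno == 4 (r4) becomes 5 (r5), while odd
	     register numbers are left alone, giving exactly the pairs
	     (r3,r4), (r5,r6), (r7,r8) and (r9,r10) listed above.  */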
4816
4817	  /* Multi-reg args are not split between registers and stack.  */
4818	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4819	    {
4820	      /* Long long and SPE vectors are aligned on the stack.
4821		 So are other 2 word items such as complex int due to
4822		 a historical mistake.  */
4823	      if (n_words == 2)
4824		cum->words += cum->words & 1;
4825	      cum->words += n_words;
4826	    }
4827
4828	  /* Note: gregno continues to accumulate even after we have started
4829	     spilling to the stack; this lets expand_builtin_saveregs see
4830	     that spilling has started.  */
4831	  cum->sysv_gregno = gregno + n_words;
4832	}
4833
4834      if (TARGET_DEBUG_ARG)
4835	{
4836	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4837		   cum->words, cum->fregno);
4838	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4839		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4840	  fprintf (stderr, "mode = %4s, named = %d\n",
4841		   GET_MODE_NAME (mode), named);
4842	}
4843    }
4844  else
4845    {
4846      int n_words = rs6000_arg_size (mode, type);
4847      int start_words = cum->words;
4848      int align_words = rs6000_parm_start (mode, type, start_words);
4849
4850      cum->words = align_words + n_words;
4851
4852      if (SCALAR_FLOAT_MODE_P (mode)
4853	  && !DECIMAL_FLOAT_MODE_P (mode)
4854	  && TARGET_HARD_FLOAT && TARGET_FPRS)
4855	cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4856
4857      if (TARGET_DEBUG_ARG)
4858	{
4859	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4860		   cum->words, cum->fregno);
4861	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4862		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4863	  fprintf (stderr, "named = %d, align = %d, depth = %d\n",
4864		   named, align_words - start_words, depth);
4865	}
4866    }
4867}
4868
4869static rtx
4870spe_build_register_parallel (enum machine_mode mode, int gregno)
4871{
4872  rtx r1, r3;
4873
4874  switch (mode)
4875    {
4876    case DFmode:
4877      r1 = gen_rtx_REG (DImode, gregno);
4878      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4879      return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
4880
4881    case DCmode:
4882      r1 = gen_rtx_REG (DImode, gregno);
4883      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4884      r3 = gen_rtx_REG (DImode, gregno + 2);
4885      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4886      return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
4887
4888    default:
4889      gcc_unreachable ();
4890    }
4891}
4892
4893/* Determine where to put a SIMD argument on the SPE.  */
4894static rtx
4895rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4896			 tree type)
4897{
4898  int gregno = cum->sysv_gregno;
4899
4900  /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4901	     doubles are passed and returned in a pair of GPRs for ABI compatibility.  */
4902  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
4903    {
4904      int n_words = rs6000_arg_size (mode, type);
4905
4906      /* Doubles go in an odd/even register pair (r5/r6, etc).  */
4907      if (mode == DFmode)
4908	gregno += (1 - gregno) & 1;
4909
4910      /* Multi-reg args are not split between registers and stack.  */
4911      if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4912	return NULL_RTX;
4913
4914      return spe_build_register_parallel (mode, gregno);
4915    }
4916  if (cum->stdarg)
4917    {
4918      int n_words = rs6000_arg_size (mode, type);
4919
4920      /* SPE vectors are put in odd registers.  */
4921      if (n_words == 2 && (gregno & 1) == 0)
4922	gregno += 1;
4923
4924      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4925	{
4926	  rtx r1, r2;
4927	  enum machine_mode m = SImode;
4928
4929	  r1 = gen_rtx_REG (m, gregno);
4930	  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4931	  r2 = gen_rtx_REG (m, gregno + 1);
4932	  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4933	  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4934	}
4935      else
4936	return NULL_RTX;
4937    }
4938  else
4939    {
4940      if (gregno <= GP_ARG_MAX_REG)
4941	return gen_rtx_REG (mode, gregno);
4942      else
4943	return NULL_RTX;
4944    }
4945}
4946
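/* Illustrative sketch (not part of the source): under the e500 v2 rules
   above, and assuming GP_ARG_MIN_REG is r3, a call such as

       void f (int a, double d);
       f (1, 2.0);

   passes A in r3; GREGNO is then 4, so the odd/even adjustment bumps it
   to 5 and D travels as a single DImode value in (r5,r6), wrapped in
   the one-element PARALLEL built by spe_build_register_parallel.  */
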
4947/* A subroutine of rs6000_darwin64_record_arg.  Assign the bits of the
4948   structure between cum->intoffset and bitpos to integer registers.  */
4949
4950static void
4951rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
4952				  HOST_WIDE_INT bitpos, rtx rvec[], int *k)
4953{
4954  enum machine_mode mode;
4955  unsigned int regno;
4956  unsigned int startbit, endbit;
4957  int this_regno, intregs, intoffset;
4958  rtx reg;
4959
4960  if (cum->intoffset == -1)
4961    return;
4962
4963  intoffset = cum->intoffset;
4964  cum->intoffset = -1;
4965
4966	  /* If this is the trailing part of a word, try to load only that
4967	     much into the register.  Otherwise load the whole register.  Note
4968	     that in the latter case we may pick up unwanted bits.  It's not a
4969	     problem at the moment, but we may wish to revisit this.  */
4970
4971  if (intoffset % BITS_PER_WORD != 0)
4972    {
4973      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4974			  MODE_INT, 0);
4975      if (mode == BLKmode)
4976	{
4977	  /* We couldn't find an appropriate mode, which happens,
4978	     e.g., in packed structs when there are 3 bytes to load.
4979	     Move intoffset back to the beginning of the word in this
4980	     case.  */
4981	  intoffset = intoffset & -BITS_PER_WORD;
4982	  mode = word_mode;
4983	}
4984    }
4985  else
4986    mode = word_mode;
4987
4988  startbit = intoffset & -BITS_PER_WORD;
4989  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4990  intregs = (endbit - startbit) / BITS_PER_WORD;
4991  this_regno = cum->words + intoffset / BITS_PER_WORD;
4992
4993  if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4994    cum->use_stack = 1;
4995
4996  intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4997  if (intregs <= 0)
4998    return;
4999
5000  intoffset /= BITS_PER_UNIT;
5001  do
5002    {
5003      regno = GP_ARG_MIN_REG + this_regno;
5004      reg = gen_rtx_REG (mode, regno);
5005      rvec[(*k)++] =
5006	gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5007
5008      this_regno += 1;
5009      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5010      mode = word_mode;
5011      intregs -= 1;
5012    }
5013  while (intregs > 0);
5014}
5015
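/* Worked example (illustrative, assuming 64-bit words): if the pending
   integer run starts at bit 32 (INTOFFSET == 32) and BITPOS is 160, the
   computation above gives

       startbit = 32 & -64         = 0
       endbit   = (160 + 63) & -64 = 192
       intregs  = (192 - 0) / 64   = 3

   so three GPRs are used: an SImode piece for the tail of word 0 at
   byte offset 4, then full word_mode pieces for words 1 and 2 at byte
   offsets 8 and 16.  */
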
5016/* Recursive workhorse for the following.  */
5017
5018static void
5019rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
5020				    HOST_WIDE_INT startbitpos, rtx rvec[],
5021				    int *k)
5022{
5023  tree f;
5024
5025  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5026    if (TREE_CODE (f) == FIELD_DECL)
5027      {
5028	HOST_WIDE_INT bitpos = startbitpos;
5029	tree ftype = TREE_TYPE (f);
5030	enum machine_mode mode;
5031	if (ftype == error_mark_node)
5032	  continue;
5033	mode = TYPE_MODE (ftype);
5034
5035	if (DECL_SIZE (f) != 0
5036	    && host_integerp (bit_position (f), 1))
5037	  bitpos += int_bit_position (f);
5038
5039	/* ??? FIXME: else assume zero offset.  */
5040
5041	if (TREE_CODE (ftype) == RECORD_TYPE)
5042	  rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5043	else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
5044	  {
5045#if 0
5046	    switch (mode)
5047	      {
5048	      case SCmode: mode = SFmode; break;
5049	      case DCmode: mode = DFmode; break;
5050	      case TCmode: mode = TFmode; break;
5051	      default: break;
5052	      }
5053#endif
5054	    rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5055	    rvec[(*k)++]
5056	      = gen_rtx_EXPR_LIST (VOIDmode,
5057				   gen_rtx_REG (mode, cum->fregno++),
5058				   GEN_INT (bitpos / BITS_PER_UNIT));
5059	    if (mode == TFmode)
5060	      cum->fregno++;
5061	  }
5062	else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5063	  {
5064	    rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5065	    rvec[(*k)++]
5066	      = gen_rtx_EXPR_LIST (VOIDmode,
5067				   gen_rtx_REG (mode, cum->vregno++),
5068				   GEN_INT (bitpos / BITS_PER_UNIT));
5069	  }
5070	else if (cum->intoffset == -1)
5071	  cum->intoffset = bitpos;
5072      }
5073}
5074
5075/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5076   the register(s) to be used for each field and subfield of a struct
5077   being passed by value, along with the offset of where the
5078	   register's value may be found in the block.  FP fields go in FP
5079	   registers, vector fields go in vector registers, and everything
5080   else goes in int registers, packed as in memory.
5081
5082   This code is also used for function return values.  RETVAL indicates
5083   whether this is the case.
5084
5085   Much of this is taken from the SPARC V9 port, which has a similar
5086   calling convention.  */
5087
5088static rtx
5089rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
5090			    int named, bool retval)
5091{
5092  rtx rvec[FIRST_PSEUDO_REGISTER];
5093  int k = 1, kbase = 1;
5094  HOST_WIDE_INT typesize = int_size_in_bytes (type);
5095  /* This is a copy; modifications are not visible to our caller.  */
5096  CUMULATIVE_ARGS copy_cum = *orig_cum;
5097  CUMULATIVE_ARGS *cum = &copy_cum;
5098
5099  /* Pad to 16 byte boundary if needed.  */
5100  if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5101      && (cum->words % 2) != 0)
5102    cum->words++;
5103
5104  cum->intoffset = 0;
5105  cum->use_stack = 0;
5106  cum->named = named;
5107
5108  /* Put entries into rvec[] for individual FP and vector fields, and
5109     for the chunks of memory that go in int regs.  Note we start at
5110     element 1; 0 is reserved for an indication of using memory, and
5111     may or may not be filled in below. */
5112  rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5113  rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5114
5115	  /* If any part of the struct went on the stack, put all of it there.
5116     This hack is because the generic code for
5117     FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5118     parts of the struct are not at the beginning.  */
5119  if (cum->use_stack)
5120    {
5121      if (retval)
5122	return NULL_RTX;    /* doesn't go in registers at all */
5123      kbase = 0;
5124      rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5125    }
5126  if (k > 1 || cum->use_stack)
5127    return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
5128  else
5129    return NULL_RTX;
5130}
5131
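/* Illustrative sketch (not part of the source): assuming this is the
   first argument, a Darwin64 struct such as

       struct s { double d; int i; int j; };

   comes back from this function roughly as

       (parallel:BLK [(expr_list (reg:DF f1) (const_int 0))
		      (expr_list (reg:DI r4) (const_int 8))])

   i.e. the double travels in an FPR while the two ints are flushed to
   the next integer register as one word_mode chunk.  */
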
5132/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */
5133
5134static rtx
5135rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
5136{
5137  int n_units;
5138  int i, k;
5139  rtx rvec[GP_ARG_NUM_REG + 1];
5140
5141  if (align_words >= GP_ARG_NUM_REG)
5142    return NULL_RTX;
5143
5144  n_units = rs6000_arg_size (mode, type);
5145
5146  /* Optimize the simple case where the arg fits in one gpr, except in
5147     the case of BLKmode due to assign_parms assuming that registers are
5148     BITS_PER_WORD wide.  */
5149  if (n_units == 0
5150      || (n_units == 1 && mode != BLKmode))
5151    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5152
5153  k = 0;
5154  if (align_words + n_units > GP_ARG_NUM_REG)
5155    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
5156       using a magic NULL_RTX component.
5157       This is not strictly correct.  Only some of the arg belongs in
5158       memory, not all of it.  However, the normal scheme using
5159       function_arg_partial_nregs can result in unusual subregs, eg.
5160       (subreg:SI (reg:DF) 4), which are not handled well.  The code to
5161       store the whole arg to memory is often more efficient than code
5162       to store pieces, and we know that space is available in the right
5163       place for the whole arg.  */
5164    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5165
5166  i = 0;
5167  do
5168    {
5169      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5170      rtx off = GEN_INT (i++ * 4);
5171      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5172    }
5173  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5174
5175  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5176}
5177
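/* Illustrative sketch (not part of the source): with -m32 on 64-bit
   PowerPC hardware, a DFmode argument at ALIGN_WORDS == 2 is described
   as two SImode halves,

       (parallel:DF [(expr_list (reg:SI r5) (const_int 0))
		     (expr_list (reg:SI r6) (const_int 4))])

   so that assign_parms never sees a subreg of a multi-word hard
   register, which is the problem case described above.  */
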
5178/* Determine where to put an argument to a function.
5179   Value is zero to push the argument on the stack,
5180   or a hard register in which to store the argument.
5181
5182   MODE is the argument's machine mode.
5183   TYPE is the data type of the argument (as a tree).
5184    This is null for libcalls where that information may
5185    not be available.
5186   CUM is a variable of type CUMULATIVE_ARGS which gives info about
5187    the preceding args and about the function being called.  It is
5188    not modified in this routine.
5189   NAMED is nonzero if this argument is a named parameter
5190    (otherwise it is an extra parameter matching an ellipsis).
5191
5192	   On RS/6000 the first eight words of non-FP args are normally in
5193	   registers and the rest are pushed.  Under AIX, the first 13 FP args
5194	   are in registers.  Under V.4, the first 8 FP args are in registers.
5195
5196   If this is floating-point and no prototype is specified, we use
5197   both an FP and integer register (or possibly FP reg and stack).  Library
5198   functions (when CALL_LIBCALL is set) always have the proper types for args,
5199   so we can pass the FP value just in one register.  emit_library_function
5200   doesn't support PARALLEL anyway.
5201
5202   Note that for args passed by reference, function_arg will be called
5203   with MODE and TYPE set to that of the pointer to the arg, not the arg
5204   itself.  */
5205
5206rtx
5207function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5208	      tree type, int named)
5209{
5210  enum rs6000_abi abi = DEFAULT_ABI;
5211
5212	  /* Return a marker to indicate whether we need to set or clear the
5213	     CR1 bit that V.4 uses to say FP args were passed in registers.
5214     Assume that we don't need the marker for software floating point,
5215     or compiler generated library calls.  */
5216  if (mode == VOIDmode)
5217    {
5218      if (abi == ABI_V4
5219	  && (cum->call_cookie & CALL_LIBCALL) == 0
5220	  && (cum->stdarg
5221	      || (cum->nargs_prototype < 0
5222		  && (cum->prototype || TARGET_NO_PROTOTYPE))))
5223	{
5224	  /* For the SPE, we need to crxor CR6 always.  */
5225	  if (TARGET_SPE_ABI)
5226	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5227	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5228	    return GEN_INT (cum->call_cookie
5229			    | ((cum->fregno == FP_ARG_MIN_REG)
5230			       ? CALL_V4_SET_FP_ARGS
5231			       : CALL_V4_CLEAR_FP_ARGS));
5232	}
5233
5234      return GEN_INT (cum->call_cookie);
5235    }
5236
5237  if (rs6000_darwin64_abi && mode == BLKmode
5238      && TREE_CODE (type) == RECORD_TYPE)
5239    {
5240      rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
5241      if (rslt != NULL_RTX)
5242	return rslt;
5243      /* Else fall through to usual handling.  */
5244    }
5245
5246  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5247    if (TARGET_64BIT && ! cum->prototype)
5248      {
5249	/* Vector parameters get passed in a vector register and
5250	   also in GPRs or memory, in the absence of a prototype.  */
5251	int align_words;
5252	rtx slot;
5253	align_words = (cum->words + 1) & ~1;
5254
5255	if (align_words >= GP_ARG_NUM_REG)
5256	  {
5257	    slot = NULL_RTX;
5258	  }
5259	else
5260	  {
5261	    slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5262	  }
5263	return gen_rtx_PARALLEL (mode,
5264		 gen_rtvec (2,
5265			    gen_rtx_EXPR_LIST (VOIDmode,
5266					       slot, const0_rtx),
5267			    gen_rtx_EXPR_LIST (VOIDmode,
5268					       gen_rtx_REG (mode, cum->vregno),
5269					       const0_rtx)));
5270      }
5271    else
5272      return gen_rtx_REG (mode, cum->vregno);
5273  else if (TARGET_ALTIVEC_ABI
5274	   && (ALTIVEC_VECTOR_MODE (mode)
5275	       || (type && TREE_CODE (type) == VECTOR_TYPE
5276		   && int_size_in_bytes (type) == 16)))
5277    {
5278      if (named || abi == ABI_V4)
5279	return NULL_RTX;
5280      else
5281	{
5282	  /* Vector parameters to varargs functions under AIX or Darwin
5283	     get passed in memory and possibly also in GPRs.  */
5284	  int align, align_words, n_words;
5285	  enum machine_mode part_mode;
5286
5287	  /* Vector parameters must be 16-byte aligned.  This places them at
5288	     2 mod 4 in terms of words in 32-bit mode, since the parameter
5289	     save area starts at offset 24 from the stack.  In 64-bit mode,
5290	     they just have to start on an even word, since the parameter
5291	     save area is 16-byte aligned.  */
5292	  if (TARGET_32BIT)
5293	    align = (2 - cum->words) & 3;
5294	  else
5295	    align = cum->words & 1;
5296	  align_words = cum->words + align;
5297
5298	  /* Out of registers?  Memory, then.  */
5299	  if (align_words >= GP_ARG_NUM_REG)
5300	    return NULL_RTX;
5301
5302	  if (TARGET_32BIT && TARGET_POWERPC64)
5303	    return rs6000_mixed_function_arg (mode, type, align_words);
5304
5305	  /* The vector value goes in GPRs.  Only the part of the
5306	     value in GPRs is reported here.  */
5307	  part_mode = mode;
5308	  n_words = rs6000_arg_size (mode, type);
5309	  if (align_words + n_words > GP_ARG_NUM_REG)
5310	    /* Fortunately, there are only two possibilities, the value
5311	       is either wholly in GPRs or half in GPRs and half not.  */
5312	    part_mode = DImode;
5313
5314	  return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5315	}
5316    }
5317  else if (TARGET_SPE_ABI && TARGET_SPE
5318	   && (SPE_VECTOR_MODE (mode)
5319	       || (TARGET_E500_DOUBLE && (mode == DFmode
5320					  || mode == DCmode))))
5321    return rs6000_spe_function_arg (cum, mode, type);
5322
5323  else if (abi == ABI_V4)
5324    {
5325      if (TARGET_HARD_FLOAT && TARGET_FPRS
5326	  && (mode == SFmode || mode == DFmode
5327	      || (mode == TFmode && !TARGET_IEEEQUAD)))
5328	{
5329	  if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
5330	    return gen_rtx_REG (mode, cum->fregno);
5331	  else
5332	    return NULL_RTX;
5333	}
5334      else
5335	{
5336	  int n_words = rs6000_arg_size (mode, type);
5337	  int gregno = cum->sysv_gregno;
5338
5339	  /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5340	     (r7,r8) or (r9,r10), as is any other 2-word item such
5341	     as complex int, due to a historical mistake.  */
5342	  if (n_words == 2)
5343	    gregno += (1 - gregno) & 1;
5344
5345	  /* Multi-reg args are not split between registers and stack.  */
5346	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5347	    return NULL_RTX;
5348
5349	  if (TARGET_32BIT && TARGET_POWERPC64)
5350	    return rs6000_mixed_function_arg (mode, type,
5351					      gregno - GP_ARG_MIN_REG);
5352	  return gen_rtx_REG (mode, gregno);
5353	}
5354    }
5355  else
5356    {
5357      int align_words = rs6000_parm_start (mode, type, cum->words);
5358
5359      if (USE_FP_FOR_ARG_P (cum, mode, type))
5360	{
5361	  rtx rvec[GP_ARG_NUM_REG + 1];
5362	  rtx r;
5363	  int k;
5364	  bool needs_psave;
5365	  enum machine_mode fmode = mode;
5366	  unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5367
5368	  if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5369	    {
5370	      /* Currently, we only ever need one reg here because complex
5371		 doubles are split.  */
5372	      gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
5373
5374	      /* Long double split over regs and memory.  */
5375	      fmode = DFmode;
5376	    }
5377
5378	  /* Do we also need to pass this arg in the parameter save
5379	     area?  */
5380	  needs_psave = (type
5381			 && (cum->nargs_prototype <= 0
5382			     || (DEFAULT_ABI == ABI_AIX
5383				 && TARGET_XL_COMPAT
5384				 && align_words >= GP_ARG_NUM_REG)));
5385
5386	  if (!needs_psave && mode == fmode)
5387	    return gen_rtx_REG (fmode, cum->fregno);
5388
5389	  k = 0;
5390	  if (needs_psave)
5391	    {
5392	      /* Describe the part that goes in gprs or the stack.
5393		 This piece must come first, before the fprs.  */
5394	      if (align_words < GP_ARG_NUM_REG)
5395		{
5396		  unsigned long n_words = rs6000_arg_size (mode, type);
5397
5398		  if (align_words + n_words > GP_ARG_NUM_REG
5399		      || (TARGET_32BIT && TARGET_POWERPC64))
5400		    {
5401		      /* If this is partially on the stack, then we only
5402			 include the portion actually in registers here.  */
5403		      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5404		      rtx off;
5405		      int i = 0;
5406		      if (align_words + n_words > GP_ARG_NUM_REG)
5407			/* Not all of the arg fits in gprs.  Say that it
5408			   goes in memory too, using a magic NULL_RTX
5409			   component.  Also see comment in
5410			   rs6000_mixed_function_arg for why the normal
5411			   function_arg_partial_nregs scheme doesn't work
5412			   in this case. */
5413			rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5414						       const0_rtx);
5415		      do
5416			{
5417			  r = gen_rtx_REG (rmode,
5418					   GP_ARG_MIN_REG + align_words);
5419			  off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5420			  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5421			}
5422		      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5423		    }
5424		  else
5425		    {
5426		      /* The whole arg fits in gprs.  */
5427		      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5428		      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5429		    }
5430		}
5431	      else
5432		/* It's entirely in memory.  */
5433		rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5434	    }
5435
5436	  /* Describe where this piece goes in the fprs.  */
5437	  r = gen_rtx_REG (fmode, cum->fregno);
5438	  rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5439
5440	  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5441	}
5442      else if (align_words < GP_ARG_NUM_REG)
5443	{
5444	  if (TARGET_32BIT && TARGET_POWERPC64)
5445	    return rs6000_mixed_function_arg (mode, type, align_words);
5446
5447	  if (mode == BLKmode)
5448	    mode = Pmode;
5449
5450	  return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5451	}
5452      else
5453	return NULL_RTX;
5454    }
5455}
5456
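/* Illustrative sketch (not part of the source): under the AIX ABI a
   double passed to an unprototyped function must reach both the FPRs
   and the parameter save area, so for the first argument the code
   above returns roughly

       (parallel:DF [(expr_list (reg:DF r3) (const_int 0))
		     (expr_list (reg:DF f1) (const_int 0))])

   with the GPR/memory piece first, as required; the callee can then
   pick the value up from either location.  */
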
5457/* For an arg passed partly in registers and partly in memory, this is
5458   the number of bytes passed in registers.  For args passed entirely in
5459   registers or entirely in memory, zero.  When an arg is described by a
5460   PARALLEL, perhaps using more than one register type, this function
5461   returns the number of bytes used by the first element of the PARALLEL.  */
5462
5463static int
5464rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5465			  tree type, bool named)
5466{
5467  int ret = 0;
5468  int align_words;
5469
5470  if (DEFAULT_ABI == ABI_V4)
5471    return 0;
5472
5473  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5474      && cum->nargs_prototype >= 0)
5475    return 0;
5476
5477  /* In this complicated case we just disable the partial_nregs code.  */
5478  if (rs6000_darwin64_abi && mode == BLKmode
5479      && TREE_CODE (type) == RECORD_TYPE
5480      && int_size_in_bytes (type) > 0)
5481    return 0;
5482
5483  align_words = rs6000_parm_start (mode, type, cum->words);
5484
5485  if (USE_FP_FOR_ARG_P (cum, mode, type))
5486    {
5487      /* If we are passing this arg in the fixed parameter save area
5488	 (gprs or memory) as well as fprs, then this function should
5489	 return the number of partial bytes passed in the parameter
5490	 save area rather than partial bytes passed in fprs.  */
5491      if (type
5492	  && (cum->nargs_prototype <= 0
5493	      || (DEFAULT_ABI == ABI_AIX
5494		  && TARGET_XL_COMPAT
5495		  && align_words >= GP_ARG_NUM_REG)))
5496	return 0;
5497      else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5498	       > FP_ARG_MAX_REG + 1)
5499	ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
5500      else if (cum->nargs_prototype >= 0)
5501	return 0;
5502    }
5503
5504  if (align_words < GP_ARG_NUM_REG
5505      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5506    ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
5507
5508  if (ret != 0 && TARGET_DEBUG_ARG)
5509    fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
5510
5511  return ret;
5512}
5513
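/* Worked example (illustrative): in 64-bit mode with ALIGN_WORDS == 5
   and a 32-byte struct argument,

       rs6000_arg_size == 4 words,  5 + 4 > GP_ARG_NUM_REG (8),
       ret = (8 - 5) * 8 = 24

   so 24 bytes travel in r8..r10 and the remaining 8 bytes go on the
   stack.  */
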
5514/* A C expression that indicates when an argument must be passed by
5515   reference.  If nonzero for an argument, a copy of that argument is
5516   made in memory and a pointer to the argument is passed instead of
5517   the argument itself.  The pointer is passed in whatever way is
5518   appropriate for passing a pointer to that type.
5519
5520   Under V.4, aggregates and long double are passed by reference.
5521
5522   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5523   reference unless the AltiVec vector extension ABI is in force.
5524
5525   As an extension to all ABIs, variable sized types are passed by
5526   reference.  */
5527
5528static bool
5529rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5530			  enum machine_mode mode, tree type,
5531			  bool named ATTRIBUTE_UNUSED)
5532{
5533  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
5534    {
5535      if (TARGET_DEBUG_ARG)
5536	fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5537      return 1;
5538    }
5539
5540  if (!type)
5541    return 0;
5542
5543  if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5544    {
5545      if (TARGET_DEBUG_ARG)
5546	fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5547      return 1;
5548    }
5549
5550  if (int_size_in_bytes (type) < 0)
5551    {
5552      if (TARGET_DEBUG_ARG)
5553	fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5554      return 1;
5555    }
5556
5557	  /* Allow -maltivec -mabi=no-altivec without warning.  AltiVec vector
5558	     modes only exist for GCC vector types when -maltivec is given.  */
5559  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5560    {
5561      if (TARGET_DEBUG_ARG)
5562	fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
5563      return 1;
5564    }
5565
5566  /* Pass synthetic vectors in memory.  */
5567  if (TREE_CODE (type) == VECTOR_TYPE
5568      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5569    {
5570      static bool warned_for_pass_big_vectors = false;
5571      if (TARGET_DEBUG_ARG)
5572	fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5573      if (!warned_for_pass_big_vectors)
5574	{
5575	  warning (0, "GCC vector passed by reference: "
5576		   "non-standard ABI extension with no compatibility guarantee");
5577	  warned_for_pass_big_vectors = true;
5578	}
5579      return 1;
5580    }
5581
5582  return 0;
5583}
5584
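/* Illustrative examples (not part of the source): under V.4 both of
   the parameters below are received as pointers per the rules above.  */
#if 0
struct big { int a[4]; };			/* V.4 aggregate */
typedef int bigvec __attribute__ ((vector_size (32)));
						/* synthetic vector wider
						   than the hardware */
void f (struct big b, bigvec v);
#endif
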
5585static void
5586rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5587{
5588  int i;
5589  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5590
5591  if (nregs == 0)
5592    return;
5593
5594  for (i = 0; i < nregs; i++)
5595    {
5596      rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5597      if (reload_completed)
5598	{
5599	  if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5600	    tem = NULL_RTX;
5601	  else
5602	    tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5603				       i * GET_MODE_SIZE (reg_mode));
5604	}
5605      else
5606	tem = replace_equiv_address (tem, XEXP (tem, 0));
5607
5608      gcc_assert (tem);
5609
5610      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5611    }
5612}
5613
5614	/* Perform any actions needed for a function that is receiving a
5615	   variable number of arguments.
5616
5617   CUM is as above.
5618
5619   MODE and TYPE are the mode and type of the current parameter.
5620
5621   PRETEND_SIZE is a variable that should be set to the amount of stack
5622   that must be pushed by the prolog to pretend that our caller pushed
5623   it.
5624
5625   Normally, this macro will push all remaining incoming registers on the
5626   stack and set PRETEND_SIZE to the length of the registers pushed.  */
5627
5628static void
5629setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5630			tree type, int *pretend_size ATTRIBUTE_UNUSED,
5631			int no_rtl)
5632{
5633  CUMULATIVE_ARGS next_cum;
5634  int reg_size = TARGET_32BIT ? 4 : 8;
5635  rtx save_area = NULL_RTX, mem;
5636  int first_reg_offset, set;
5637
5638  /* Skip the last named argument.  */
5639  next_cum = *cum;
5640  function_arg_advance (&next_cum, mode, type, 1, 0);
5641
5642  if (DEFAULT_ABI == ABI_V4)
5643    {
5644      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5645
5646      if (! no_rtl)
5647	{
5648	  int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5649	  HOST_WIDE_INT offset = 0;
5650
5651	  /* Try to optimize the size of the varargs save area.
5652	     The ABI requires that ap.reg_save_area is doubleword
5653	     aligned, but we don't need to allocate space for all
5654	     the bytes, only those to which we actually will save
5655	     anything.  */
5656	  if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5657	    gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5658	  if (TARGET_HARD_FLOAT && TARGET_FPRS
5659	      && next_cum.fregno <= FP_ARG_V4_MAX_REG
5660	      && cfun->va_list_fpr_size)
5661	    {
5662	      if (gpr_reg_num)
5663		fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5664			   * UNITS_PER_FP_WORD;
5665	      if (cfun->va_list_fpr_size
5666		  < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5667		fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5668	      else
5669		fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5670			    * UNITS_PER_FP_WORD;
5671	    }
5672	  if (gpr_reg_num)
5673	    {
5674	      offset = -((first_reg_offset * reg_size) & ~7);
5675	      if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5676		{
5677		  gpr_reg_num = cfun->va_list_gpr_size;
5678		  if (reg_size == 4 && (first_reg_offset & 1))
5679		    gpr_reg_num++;
5680		}
5681	      gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5682	    }
5683	  else if (fpr_size)
5684	    offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5685		       * UNITS_PER_FP_WORD
5686		     - (int) (GP_ARG_NUM_REG * reg_size);
5687
5688	  if (gpr_size + fpr_size)
5689	    {
5690	      rtx reg_save_area
5691		= assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5692	      gcc_assert (GET_CODE (reg_save_area) == MEM);
5693	      reg_save_area = XEXP (reg_save_area, 0);
5694	      if (GET_CODE (reg_save_area) == PLUS)
5695		{
5696		  gcc_assert (XEXP (reg_save_area, 0)
5697			      == virtual_stack_vars_rtx);
5698		  gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5699		  offset += INTVAL (XEXP (reg_save_area, 1));
5700		}
5701	      else
5702		gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5703	    }
5704
5705	  cfun->machine->varargs_save_offset = offset;
5706	  save_area = plus_constant (virtual_stack_vars_rtx, offset);
5707	}
5708    }
5709  else
5710    {
5711      first_reg_offset = next_cum.words;
5712      save_area = virtual_incoming_args_rtx;
5713
5714      if (targetm.calls.must_pass_in_stack (mode, type))
5715	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5716    }
5717
5718  set = get_varargs_alias_set ();
5719  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5720      && cfun->va_list_gpr_size)
5721    {
5722      int nregs = GP_ARG_NUM_REG - first_reg_offset;
5723
5724      if (va_list_gpr_counter_field)
5725	{
5726	  /* V4 va_list_gpr_size counts number of registers needed.  */
5727	  if (nregs > cfun->va_list_gpr_size)
5728	    nregs = cfun->va_list_gpr_size;
5729	}
5730      else
5731	{
5732	  /* char * va_list instead counts number of bytes needed.  */
5733	  if (nregs > cfun->va_list_gpr_size / reg_size)
5734	    nregs = cfun->va_list_gpr_size / reg_size;
5735	}
5736
5737      mem = gen_rtx_MEM (BLKmode,
5738			 plus_constant (save_area,
5739					first_reg_offset * reg_size));
5740      MEM_NOTRAP_P (mem) = 1;
5741      set_mem_alias_set (mem, set);
5742      set_mem_align (mem, BITS_PER_WORD);
5743
5744      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5745				  nregs);
5746    }
5747
5748  /* Save FP registers if needed.  */
5749  if (DEFAULT_ABI == ABI_V4
5750      && TARGET_HARD_FLOAT && TARGET_FPRS
5751      && ! no_rtl
5752      && next_cum.fregno <= FP_ARG_V4_MAX_REG
5753      && cfun->va_list_fpr_size)
5754    {
5755      int fregno = next_cum.fregno, nregs;
5756      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5757      rtx lab = gen_label_rtx ();
5758      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5759					       * UNITS_PER_FP_WORD);
5760
5761      emit_jump_insn
5762	(gen_rtx_SET (VOIDmode,
5763		      pc_rtx,
5764		      gen_rtx_IF_THEN_ELSE (VOIDmode,
5765					    gen_rtx_NE (VOIDmode, cr1,
5766							const0_rtx),
5767					    gen_rtx_LABEL_REF (VOIDmode, lab),
5768					    pc_rtx)));
5769
5770      for (nregs = 0;
5771	   fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5772	   fregno++, off += UNITS_PER_FP_WORD, nregs++)
5773	{
5774	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5775	  MEM_NOTRAP_P (mem) = 1;
5776	  set_mem_alias_set (mem, set);
5777	  set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
5778	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5779	}
5780
5781      emit_label (lab);
5782    }
5783}
5784
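/* Layout sketch (illustrative, 32-bit V.4): if the named args consumed
   r3 and f1, the code above dumps r4..r10 at byte offsets 4..31 of the
   save area and f2 onward at offsets 40, 48, ... (the 8 GPR words,
   i.e. 32 bytes, precede the FPR slots), with the FPR stores guarded
   by the CR1 test so that they are skipped when the caller indicated
   no FP args were passed.  */
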
5785/* Create the va_list data type.  */
5786
5787static tree
5788rs6000_build_builtin_va_list (void)
5789{
5790  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5791
5792  /* For AIX, prefer 'char *' because that's what the system
5793     header files like.  */
5794  if (DEFAULT_ABI != ABI_V4)
5795    return build_pointer_type (char_type_node);
5796
5797  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5798  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5799
5800  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5801		      unsigned_char_type_node);
5802  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5803		      unsigned_char_type_node);
5804  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5805     every user file.  */
5806  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5807		      short_unsigned_type_node);
5808  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5809		      ptr_type_node);
5810  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5811		      ptr_type_node);
5812
5813  va_list_gpr_counter_field = f_gpr;
5814  va_list_fpr_counter_field = f_fpr;
5815
5816  DECL_FIELD_CONTEXT (f_gpr) = record;
5817  DECL_FIELD_CONTEXT (f_fpr) = record;
5818  DECL_FIELD_CONTEXT (f_res) = record;
5819  DECL_FIELD_CONTEXT (f_ovf) = record;
5820  DECL_FIELD_CONTEXT (f_sav) = record;
5821
5822  TREE_CHAIN (record) = type_decl;
5823  TYPE_NAME (record) = type_decl;
5824  TYPE_FIELDS (record) = f_gpr;
5825  TREE_CHAIN (f_gpr) = f_fpr;
5826  TREE_CHAIN (f_fpr) = f_res;
5827  TREE_CHAIN (f_res) = f_ovf;
5828  TREE_CHAIN (f_ovf) = f_sav;
5829
5830  layout_type (record);
5831
5832  /* The correct type is an array type of one element.  */
5833  return build_array_type (record, build_index_type (size_zero_node));
5834}
5835
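/* For reference (a sketch, not used by the compiler), the record built
   below is equivalent to the C declaration that follows; the
   array-of-one trick gives the type the usual pointer-like va_list
   behavior.  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;		/* GPRs consumed so far */
  unsigned char fpr;		/* FPRs consumed so far */
  unsigned short reserved;	/* padding, named to placate -Wpadded */
  void *overflow_arg_area;	/* args that spilled to the stack */
  void *reg_save_area;		/* base of the prologue's register dump */
} __va_list[1];
#endif
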
5836/* Implement va_start.  */
5837
5838void
5839rs6000_va_start (tree valist, rtx nextarg)
5840{
5841  HOST_WIDE_INT words, n_gpr, n_fpr;
5842  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5843  tree gpr, fpr, ovf, sav, t;
5844
5845  /* Only SVR4 needs something special.  */
5846  if (DEFAULT_ABI != ABI_V4)
5847    {
5848      std_expand_builtin_va_start (valist, nextarg);
5849      return;
5850    }
5851
5852  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5853  f_fpr = TREE_CHAIN (f_gpr);
5854  f_res = TREE_CHAIN (f_fpr);
5855  f_ovf = TREE_CHAIN (f_res);
5856  f_sav = TREE_CHAIN (f_ovf);
5857
5858  valist = build_va_arg_indirect_ref (valist);
5859  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5860  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5861  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5862  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5863
5864  /* Count number of gp and fp argument registers used.  */
5865  words = current_function_args_info.words;
5866  n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5867	       GP_ARG_NUM_REG);
5868  n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5869	       FP_ARG_NUM_REG);
5870
5871  if (TARGET_DEBUG_ARG)
5872    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5873	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5874	     words, n_gpr, n_fpr);
5875
5876  if (cfun->va_list_gpr_size)
5877    {
5878      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5879		  build_int_cst (NULL_TREE, n_gpr));
5880      TREE_SIDE_EFFECTS (t) = 1;
5881      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5882    }
5883
5884  if (cfun->va_list_fpr_size)
5885    {
5886      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5887		  build_int_cst (NULL_TREE, n_fpr));
5888      TREE_SIDE_EFFECTS (t) = 1;
5889      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5890    }
5891
5892  /* Find the overflow area.  */
5893  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5894  if (words != 0)
5895    t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
5896	        build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5897  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5898  TREE_SIDE_EFFECTS (t) = 1;
5899  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5900
5901  /* If there were no va_arg invocations, don't set up the register
5902     save area.  */
5903  if (!cfun->va_list_gpr_size
5904      && !cfun->va_list_fpr_size
5905      && n_gpr < GP_ARG_NUM_REG
5906      && n_fpr < FP_ARG_V4_MAX_REG)
5907    return;
5908
5909  /* Find the register save area.  */
5910  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5911  if (cfun->machine->varargs_save_offset)
5912    t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
5913	        build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
5914  t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5915  TREE_SIDE_EFFECTS (t) = 1;
5916  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5917}
5918
5919/* Implement va_arg.  */
5920
5921tree
5922rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5923{
5924  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5925  tree gpr, fpr, ovf, sav, reg, t, u;
5926  int size, rsize, n_reg, sav_ofs, sav_scale;
5927  tree lab_false, lab_over, addr;
5928  int align;
5929  tree ptrtype = build_pointer_type (type);
5930
5931  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5932    {
5933      t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5934      return build_va_arg_indirect_ref (t);
5935    }
5936
5937  if (DEFAULT_ABI != ABI_V4)
5938    {
5939      if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5940	{
5941	  tree elem_type = TREE_TYPE (type);
5942	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
5943	  int elem_size = GET_MODE_SIZE (elem_mode);
5944
5945	  if (elem_size < UNITS_PER_WORD)
5946	    {
5947	      tree real_part, imag_part;
5948	      tree post = NULL_TREE;
5949
5950	      real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5951						  &post);
5952	      /* Copy the value into a temporary, lest the formal temporary
5953		 be reused out from under us.  */
5954	      real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5955	      append_to_statement_list (post, pre_p);
5956
5957	      imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5958						  post_p);
5959
5960	      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
5961	    }
5962	}
5963
5964      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5965    }
5966
5967  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5968  f_fpr = TREE_CHAIN (f_gpr);
5969  f_res = TREE_CHAIN (f_fpr);
5970  f_ovf = TREE_CHAIN (f_res);
5971  f_sav = TREE_CHAIN (f_ovf);
5972
5973  valist = build_va_arg_indirect_ref (valist);
5974  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5975  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5976  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5977  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5978
5979  size = int_size_in_bytes (type);
5980  rsize = (size + 3) / 4;
5981  align = 1;
5982
5983  if (TARGET_HARD_FLOAT && TARGET_FPRS
5984      && (TYPE_MODE (type) == SFmode
5985	  || TYPE_MODE (type) == DFmode
5986	  || TYPE_MODE (type) == TFmode))
5987    {
5988      /* FP args go in FP registers, if present.  */
5989      reg = fpr;
5990      n_reg = (size + 7) / 8;
5991      sav_ofs = 8*4;
5992      sav_scale = 8;
5993      if (TYPE_MODE (type) != SFmode)
5994	align = 8;
5995    }
5996  else
5997    {
5998      /* Otherwise into GP registers.  */
5999      reg = gpr;
6000      n_reg = rsize;
6001      sav_ofs = 0;
6002      sav_scale = 4;
6003      if (n_reg == 2)
6004	align = 8;
6005    }
6006
6007  /* Pull the value out of the saved registers....  */
6008
6009  lab_over = NULL;
6010  addr = create_tmp_var (ptr_type_node, "addr");
6011  DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6012
6013	  /* AltiVec vectors never go in the register save area when -mabi=altivec.  */
6014  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6015    align = 16;
6016  else
6017    {
6018      lab_false = create_artificial_label ();
6019      lab_over = create_artificial_label ();
6020
6021	      /* Long long and SPE vectors are aligned in the registers,
6022		 as is any other 2-GPR item such as complex int, due to a
6023		 historical mistake.  */
6024      u = reg;
6025      if (n_reg == 2 && reg == gpr)
6026	{
6027	  u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
6028		     size_int (n_reg - 1));
6029	  u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6030	}
6031
6032      t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
6033      t = build2 (GE_EXPR, boolean_type_node, u, t);
6034      u = build1 (GOTO_EXPR, void_type_node, lab_false);
6035      t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6036      gimplify_and_add (t, pre_p);
6037
6038      t = sav;
6039      if (sav_ofs)
6040	t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
6041
6042      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
6043      u = build1 (CONVERT_EXPR, integer_type_node, u);
6044      u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
6045      t = build2 (PLUS_EXPR, ptr_type_node, t, u);
6046
6047      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
6048      gimplify_and_add (t, pre_p);
6049
6050      t = build1 (GOTO_EXPR, void_type_node, lab_over);
6051      gimplify_and_add (t, pre_p);
6052
6053      t = build1 (LABEL_EXPR, void_type_node, lab_false);
6054      append_to_statement_list (t, pre_p);
6055
6056      if ((n_reg == 2 && reg != gpr) || n_reg > 2)
6057	{
6058	  /* Ensure that we don't find any more args in regs.
6059	     Alignment has taken care of the n_reg == 2 gpr case.  */
6060	  t = build2 (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
6061	  gimplify_and_add (t, pre_p);
6062	}
6063    }
6064
6065  /* ... otherwise out of the overflow area.  */
6066
6067  /* Care for on-stack alignment if needed.  */
6068  t = ovf;
6069  if (align != 1)
6070    {
6071      t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6072      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
6073		  build_int_cst (NULL_TREE, -align));
6074    }
6075  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6076
6077  u = build2 (MODIFY_EXPR, void_type_node, addr, t);
6078  gimplify_and_add (u, pre_p);
6079
6080  t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6081  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6082  gimplify_and_add (t, pre_p);
6083
6084  if (lab_over)
6085    {
6086      t = build1 (LABEL_EXPR, void_type_node, lab_over);
6087      append_to_statement_list (t, pre_p);
6088    }
6089
6090  if (STRICT_ALIGNMENT
6091      && (TYPE_ALIGN (type)
6092	  > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6093    {
6094      /* The value (of type complex double, for example) may not be
6095	 aligned in memory in the saved registers, so copy via a
6096	 temporary.  (This is the same code as used for SPARC.)  */
6097      tree tmp = create_tmp_var (type, "va_arg_tmp");
6098      tree dest_addr = build_fold_addr_expr (tmp);
6099
6100      tree copy = build_function_call_expr
6101	(implicit_built_in_decls[BUILT_IN_MEMCPY],
6102	 tree_cons (NULL_TREE, dest_addr,
6103		    tree_cons (NULL_TREE, addr,
6104			       tree_cons (NULL_TREE, size_int (rsize * 4),
6105					  NULL_TREE))));
6106
6107      gimplify_and_add (copy, pre_p);
6108      addr = dest_addr;
6109    }
6110
6111  addr = fold_convert (ptrtype, addr);
6112  return build_va_arg_indirect_ref (addr);
6113}
6114
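/* Illustrative sketch (not part of the source): for a non-FP,
   single-register argument under V.4, the trees built above amount to
   the following C logic (AP names the __va_list_tag object).  */
#if 0
char *addr;
if (ap->gpr < 8)
  addr = (char *) ap->reg_save_area + ap->gpr++ * 4;
else
  {
    addr = (char *) ap->overflow_arg_area;
    ap->overflow_arg_area = addr + 4;
  }
/* The value is then read from ADDR.  */
#endif
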
6115/* Builtins.  */
6116
6117static void
6118def_builtin (int mask, const char *name, tree type, int code)
6119{
6120  if (mask & target_flags)
6121    {
6122      if (rs6000_builtin_decls[code])
6123	abort ();
6124
6125      rs6000_builtin_decls[code] =
6126        lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
6127				     NULL, NULL_TREE);
6128    }
6129}
6130
6131/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
6132
6133static const struct builtin_description bdesc_3arg[] =
6134{
6135  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6136  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6137  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6138  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6139  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6140  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6141  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6142  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6143  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6144  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
6145  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
6146  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6147  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6148  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6149  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6150  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6151  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6152  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6153  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6154  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6155  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6156  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6157  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
6158
6159  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6160  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6161  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6162  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6163  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6164  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6165  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6166  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6167  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6168  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6169  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6170  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6171  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6172  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6173  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
6174};
6175
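/* Usage sketch (illustrative): the "__builtin_vec_*" entries above with
   CODE_FOR_nothing are overloaded names resolved to a type-specific
   instance later; user code normally reaches them through <altivec.h>,
   e.g. (compile with -maltivec):  */
#if 0
#include <altivec.h>

vector float
fma_v4sf (vector float a, vector float b, vector float c)
{
  return vec_madd (a, b, c);	/* resolves to __builtin_altivec_vmaddfp */
}
#endif
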
6176/* DST operations: void foo (void *, const int, const char).  */
6177
6178static const struct builtin_description bdesc_dst[] =
6179{
6180  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6181  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6182  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
6183  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6184
6185  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6186  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6187  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6188  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
6189};
6190
6191/* Simple binary operations: VECc = foo (VECa, VECb).  */
6192
6193static struct builtin_description bdesc_2arg[] =
6194{
6195  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6196  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6197  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6198  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
6199  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6200  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6201  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6202  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6203  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6204  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6205  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
6206  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
6207  { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
6208  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6209  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6210  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6211  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6212  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6213  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
6214  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6215  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
6216  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6217  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6218  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6219  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6220  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6221  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6222  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6223  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6224  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6225  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6226  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6227  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6228  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6229  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6230  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6231  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6232  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6233  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6234  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6235  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6236  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6237  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6238  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6239  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6240  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6241  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6242  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6243  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6244  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6245  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6246  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6247  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6248  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6249  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6250  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6251  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6252  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6253  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6254  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6255  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6256  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6257  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6258  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
6259  { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
6260  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6261  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6262  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6263  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
6264  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6265  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6266  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6267  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6268  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6269  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6270  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6271  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6272  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6273  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6274  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6275  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6276  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6277  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6278  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6279  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6280  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6281  { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6282  { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6283  { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6284  { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6285  { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6286  { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6287  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6288  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6289  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6290  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6291  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6292  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6293  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6294  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6295  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6296  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6297  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6298  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6299  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6300  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6301  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6302  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6303  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6304  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
6305  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
6306
6307  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6308  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6309  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6310  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6311  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6312  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6313  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6314  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6315  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6316  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6317  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6318  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6319  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6320  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6321  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6322  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6323  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6324  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6325  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6326  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6327  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6328  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6329  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6330  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6331  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6332  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6333  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6334  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6335  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6336  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6337  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6338  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6339  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6340  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6341  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6342  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6343  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6344  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6345  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6346  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6347  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6348  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6349  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6350  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6351  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6352  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6353  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6354  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6355  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6356  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6357  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6358  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6359  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6360  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6361  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6362  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6363  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6364  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6365  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6366  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6367  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6368  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6369  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6370  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6371  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6372  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6373  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6374  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6375  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6376  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6377  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6378  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6379  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6380  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6381  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6382  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6383  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6384  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6385  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6386  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6387  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6388  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6389  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6390  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6391  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6392  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6393  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6394  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6395  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6396  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6397  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6398  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6399  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6400  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6401  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6402  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6403  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6404  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6405  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6406  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6407  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6408  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6409  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6410  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6411  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6412  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6413  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6414  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6415  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6416  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6417  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6418  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6419  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6420  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6421  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6422  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6423  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6424  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6425  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6426  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6427  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6428  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6429  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6430  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6431  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6432  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6433  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6434
6435  /* Place-holder.  Leave as first SPE builtin.  */
6436  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6437  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6438  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6439  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6440  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6441  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6442  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6443  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6444  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6445  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6446  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6447  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6448  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6449  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6450  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6451  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6452  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6453  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6454  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6455  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6456  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6457  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6458  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6459  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6460  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6461  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6462  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6463  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6464  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6465  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6466  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6467  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6468  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6469  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6470  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6471  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6472  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6473  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6474  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6475  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6476  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6477  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6478  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6479  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6480  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6481  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6482  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6483  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6484  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6485  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6486  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6487  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6488  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6489  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6490  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6491  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6492  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6493  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6494  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6495  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6496  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6497  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6498  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6499  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6500  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6501  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6502  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6503  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6504  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6505  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6506  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6507  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6508  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6509  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6510  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6511  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6512  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6513  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6514  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6515  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6516  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6517  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6518  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6519  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6520  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6521  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6522  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6523  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6524  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6525  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6526  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6527  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6528  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6529  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6530  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6531  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6532  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6533  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6534  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6535  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6536  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6537  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6538  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6539  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6540  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6541  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6542  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6543  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6544  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6545
6546  /* SPE binary operations expecting a 5-bit unsigned literal.  */
6547  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6548
6549  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6550  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6551  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6552  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6553  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6554  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6555  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6556  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6557  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6558  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6559  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6560  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6561  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6562  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6563  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6564  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6565  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6566  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6567  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6568  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6569  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6570  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6571  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6572  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6573  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6574  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6575
6576  /* Place-holder.  Leave as last binary SPE builtin.  */
6577  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6578};
6579
6580/* AltiVec predicates.  */
6581
6582struct builtin_description_predicates
6583{
6584  const unsigned int mask;
6585  const enum insn_code icode;
6586  const char *opcode;
6587  const char *const name;
6588  const enum rs6000_builtins code;
6589};
6590
6591static const struct builtin_description_predicates bdesc_altivec_preds[] =
6592{
6593  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6594  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6595  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6596  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6597  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6598  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6599  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6600  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6601  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6602  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6603  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6604  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6605  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6606
6607  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6608  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6609  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
6610};
6611
6612/* SPE predicates.  */
6613static struct builtin_description bdesc_spe_predicates[] =
6614{
6615  /* Place-holder.  Leave as first.  */
6616  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6617  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6618  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6619  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6620  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6621  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6622  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6623  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6624  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6625  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6626  /* Place-holder.  Leave as last.  */
6627  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6628};
6629
6630/* SPE evsel predicates.  */
6631static struct builtin_description bdesc_spe_evsel[] =
6632{
6633  /* Place-holder.  Leave as first.  */
6634  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6635  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6636  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6637  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6638  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6639  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6640  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6641  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6642  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6643  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6644  /* Place-holder.  Leave as last.  */
6645  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6646};
6647
6648/* ABS* operations.  */
6649
6650static const struct builtin_description bdesc_abs[] =
6651{
6652  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6653  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6654  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6655  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6656  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6657  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6658  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6659};
6660
6661/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6662   foo (VECa).  */
6663
6664static struct builtin_description bdesc_1arg[] =
6665{
6666  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6667  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6668  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6669  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6670  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6671  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6672  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6673  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6674  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6675  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6676  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6677  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6678  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6679  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6680  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6681  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6682  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6683
6684  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6685  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6686  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6687  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6688  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6689  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6690  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6691  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6692  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6693  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6694  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6695  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6696  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6697  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6698  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6699  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6700  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6701  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6702  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6703
6704  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6705     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
6706  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6707  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6708  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6709  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6710  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6711  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6712  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6713  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6714  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6715  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6716  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6717  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6718  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6719  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6720  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6721  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6722  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6723  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6724  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6725  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6726  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6727  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6728  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6729  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6730  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6731  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6732  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6733  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6734
6735  /* Place-holder.  Leave as last unary SPE builtin.  */
6736  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6737};
6738
6739static rtx
6740rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6741{
6742  rtx pat;
6743  tree arg0 = TREE_VALUE (arglist);
6744  rtx op0 = expand_normal (arg0);
6745  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6746  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6747
6748  if (icode == CODE_FOR_nothing)
6749    /* Builtin not supported on this processor.  */
6750    return 0;
6751
6752  /* If we got invalid arguments, bail out before generating bad rtl.  */
6753  if (arg0 == error_mark_node)
6754    return const0_rtx;
6755
6756  if (icode == CODE_FOR_altivec_vspltisb
6757      || icode == CODE_FOR_altivec_vspltish
6758      || icode == CODE_FOR_altivec_vspltisw
6759      || icode == CODE_FOR_spe_evsplatfi
6760      || icode == CODE_FOR_spe_evsplati)
6761    {
6762      /* Only allow 5-bit *signed* literals.  */
6763      if (GET_CODE (op0) != CONST_INT
6764	  || INTVAL (op0) > 15
6765	  || INTVAL (op0) < -16)
6766	{
6767	  error ("argument 1 must be a 5-bit signed literal");
6768	  return const0_rtx;
6769	}
6770    }
6771
6772  if (target == 0
6773      || GET_MODE (target) != tmode
6774      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6775    target = gen_reg_rtx (tmode);
6776
6777  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6778    op0 = copy_to_mode_reg (mode0, op0);
6779
6780  pat = GEN_FCN (icode) (target, op0);
6781  if (! pat)
6782    return 0;
6783  emit_insn (pat);
6784
6785  return target;
6786}
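/* Illustration (added commentary, not part of the original source):
   for a call such as

     vector signed char v = __builtin_altivec_vspltisb (5);

   the single argument is expanded above, the signed-literal check
   accepts only values in [-16, 15], and the vspltisb pattern is
   emitted into TARGET.  An out-of-range literal such as
   __builtin_altivec_vspltisb (99) is diagnosed with an error and
   expands to const0_rtx rather than bad rtl.  */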
6787
6788static rtx
6789altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6790{
6791  rtx pat, scratch1, scratch2;
6792  tree arg0 = TREE_VALUE (arglist);
6793  rtx op0 = expand_normal (arg0);
6794  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6795  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6796
6797  /* If we have invalid arguments, bail out before generating bad rtl.  */
6798  if (arg0 == error_mark_node)
6799    return const0_rtx;
6800
6801  if (target == 0
6802      || GET_MODE (target) != tmode
6803      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6804    target = gen_reg_rtx (tmode);
6805
6806  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6807    op0 = copy_to_mode_reg (mode0, op0);
6808
6809  scratch1 = gen_reg_rtx (mode0);
6810  scratch2 = gen_reg_rtx (mode0);
6811
6812  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6813  if (! pat)
6814    return 0;
6815  emit_insn (pat);
6816
6817  return target;
6818}
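/* Note (added commentary, not part of the original source): AltiVec
   has no direct vector-abs instruction, so the abs patterns are
   presumed to synthesize the result (e.g. a subtract from zero plus a
   max); that is why the expander above allocates two vector scratch
   registers and passes them to the pattern along with the target and
   the input.  */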
6819
6820static rtx
6821rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6822{
6823  rtx pat;
6824  tree arg0 = TREE_VALUE (arglist);
6825  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6826  rtx op0 = expand_normal (arg0);
6827  rtx op1 = expand_normal (arg1);
6828  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6829  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6830  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6831
6832  if (icode == CODE_FOR_nothing)
6833    /* Builtin not supported on this processor.  */
6834    return 0;
6835
6836  /* If we got invalid arguments, bail out before generating bad rtl.  */
6837  if (arg0 == error_mark_node || arg1 == error_mark_node)
6838    return const0_rtx;
6839
6840  if (icode == CODE_FOR_altivec_vcfux
6841      || icode == CODE_FOR_altivec_vcfsx
6842      || icode == CODE_FOR_altivec_vctsxs
6843      || icode == CODE_FOR_altivec_vctuxs
6844      || icode == CODE_FOR_altivec_vspltb
6845      || icode == CODE_FOR_altivec_vsplth
6846      || icode == CODE_FOR_altivec_vspltw
6847      || icode == CODE_FOR_spe_evaddiw
6848      || icode == CODE_FOR_spe_evldd
6849      || icode == CODE_FOR_spe_evldh
6850      || icode == CODE_FOR_spe_evldw
6851      || icode == CODE_FOR_spe_evlhhesplat
6852      || icode == CODE_FOR_spe_evlhhossplat
6853      || icode == CODE_FOR_spe_evlhhousplat
6854      || icode == CODE_FOR_spe_evlwhe
6855      || icode == CODE_FOR_spe_evlwhos
6856      || icode == CODE_FOR_spe_evlwhou
6857      || icode == CODE_FOR_spe_evlwhsplat
6858      || icode == CODE_FOR_spe_evlwwsplat
6859      || icode == CODE_FOR_spe_evrlwi
6860      || icode == CODE_FOR_spe_evslwi
6861      || icode == CODE_FOR_spe_evsrwis
6862      || icode == CODE_FOR_spe_evsubifw
6863      || icode == CODE_FOR_spe_evsrwiu)
6864    {
6865      /* Only allow 5-bit unsigned literals.  */
6866      STRIP_NOPS (arg1);
6867      if (TREE_CODE (arg1) != INTEGER_CST
6868	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
6869	{
6870	  error ("argument 2 must be a 5-bit unsigned literal");
6871	  return const0_rtx;
6872	}
6873    }
6874
6875  if (target == 0
6876      || GET_MODE (target) != tmode
6877      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6878    target = gen_reg_rtx (tmode);
6879
6880  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6881    op0 = copy_to_mode_reg (mode0, op0);
6882  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6883    op1 = copy_to_mode_reg (mode1, op1);
6884
6885  pat = GEN_FCN (icode) (target, op0, op1);
6886  if (! pat)
6887    return 0;
6888  emit_insn (pat);
6889
6890  return target;
6891}
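/* Illustration (added commentary, not part of the original source):
   the unsigned-literal check above covers builtins such as

     vector signed int w = __builtin_altivec_vspltw (v, 2);

   where the second argument must satisfy (n & ~0x1f) == 0, i.e. lie
   in [0, 31]; anything else draws the "argument 2 must be a 5-bit
   unsigned literal" error.  */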
6892
6893static rtx
6894altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6895				  tree arglist, rtx target)
6896{
6897  rtx pat, scratch;
6898  tree cr6_form = TREE_VALUE (arglist);
6899  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6900  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6901  rtx op0 = expand_normal (arg0);
6902  rtx op1 = expand_normal (arg1);
6903  enum machine_mode tmode = SImode;
6904  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6905  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6906  int cr6_form_int;
6907
6908  if (TREE_CODE (cr6_form) != INTEGER_CST)
6909    {
6910      error ("argument 1 of __builtin_altivec_predicate must be a constant");
6911      return const0_rtx;
6912    }
6913  else
6914    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6915
6916  gcc_assert (mode0 == mode1);
6917
6918  /* If we have invalid arguments, bail out before generating bad rtl.  */
6919  if (arg0 == error_mark_node || arg1 == error_mark_node)
6920    return const0_rtx;
6921
6922  if (target == 0
6923      || GET_MODE (target) != tmode
6924      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6925    target = gen_reg_rtx (tmode);
6926
6927  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6928    op0 = copy_to_mode_reg (mode0, op0);
6929  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6930    op1 = copy_to_mode_reg (mode1, op1);
6931
6932  scratch = gen_reg_rtx (mode0);
6933
6934  pat = GEN_FCN (icode) (scratch, op0, op1,
6935			 gen_rtx_SYMBOL_REF (Pmode, opcode));
6936  if (! pat)
6937    return 0;
6938  emit_insn (pat);
6939
6940  /* The vec_any* and vec_all* predicates use the same opcodes for two
6941     different operations, but the bits in CR6 will be different
6942     depending on what information we want.  So we have to play tricks
6943     with CR6 to get the right bits out.
6944
6945     If you think this is disgusting, look at the specs for the
6946     AltiVec predicates.  */
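/* For example (added commentary, not part of the original source),
   assuming the usual <altivec.h> encodings (__CR6_EQ == 0,
   __CR6_EQ_REV == 1, __CR6_LT == 2, __CR6_LT_REV == 3):

     vec_all_eq (a, b)  ->  vcmpequw. with cr6_form 2 (test the LT bit)
     vec_any_eq (a, b)  ->  vcmpequw. with cr6_form 1 (reversed zero test)

   Both run the identical comparison; only the CR6 test selected by
   the switch below differs.  */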
6947
6948  switch (cr6_form_int)
6949    {
6950    case 0:
6951      emit_insn (gen_cr6_test_for_zero (target));
6952      break;
6953    case 1:
6954      emit_insn (gen_cr6_test_for_zero_reverse (target));
6955      break;
6956    case 2:
6957      emit_insn (gen_cr6_test_for_lt (target));
6958      break;
6959    case 3:
6960      emit_insn (gen_cr6_test_for_lt_reverse (target));
6961      break;
6962    default:
6963      error ("argument 1 of __builtin_altivec_predicate is out of range");
6964      break;
6965    }
6966
6967  return target;
6968}
6969
6970static rtx
6971altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6972{
6973  rtx pat, addr;
6974  tree arg0 = TREE_VALUE (arglist);
6975  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6976  enum machine_mode tmode = insn_data[icode].operand[0].mode;
6977  enum machine_mode mode0 = Pmode;
6978  enum machine_mode mode1 = Pmode;
6979  rtx op0 = expand_normal (arg0);
6980  rtx op1 = expand_normal (arg1);
6981
6982  if (icode == CODE_FOR_nothing)
6983    /* Builtin not supported on this processor.  */
6984    return 0;
6985
6986  /* If we got invalid arguments, bail out before generating bad rtl.  */
6987  if (arg0 == error_mark_node || arg1 == error_mark_node)
6988    return const0_rtx;
6989
6990  if (target == 0
6991      || GET_MODE (target) != tmode
6992      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6993    target = gen_reg_rtx (tmode);
6994
6995  op1 = copy_to_mode_reg (mode1, op1);
6996
6997  if (op0 == const0_rtx)
6998    {
6999      addr = gen_rtx_MEM (tmode, op1);
7000    }
7001  else
7002    {
7003      op0 = copy_to_mode_reg (mode0, op0);
7004      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7005    }
7006
7007  pat = GEN_FCN (icode) (target, addr);
7008
7009  if (! pat)
7010    return 0;
7011  emit_insn (pat);
7012
7013  return target;
7014}
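/* Illustration (added commentary, not part of the original source):
   for vec_ld (0, p) the offset operand folds to const0_rtx, so the
   load address is simply the base register; for vec_ld (16, p) the
   expander builds the indexed (plus offset base) address, matching
   the lvx reg+reg addressing mode.  */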
7015
7016static rtx
7017spe_expand_stv_builtin (enum insn_code icode, tree arglist)
7018{
7019  tree arg0 = TREE_VALUE (arglist);
7020  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7021  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7022  rtx op0 = expand_normal (arg0);
7023  rtx op1 = expand_normal (arg1);
7024  rtx op2 = expand_normal (arg2);
7025  rtx pat;
7026  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7027  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7028  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7029
7030  /* Invalid arguments.  Bail out before generating bad rtl.  */
7031  if (arg0 == error_mark_node
7032      || arg1 == error_mark_node
7033      || arg2 == error_mark_node)
7034    return const0_rtx;
7035
7036  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7037    op0 = copy_to_mode_reg (mode2, op0);
7038  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7039    op1 = copy_to_mode_reg (mode0, op1);
7040  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7041    op2 = copy_to_mode_reg (mode1, op2);
7042
7043  pat = GEN_FCN (icode) (op1, op2, op0);
7044  if (pat)
7045    emit_insn (pat);
7046  return NULL_RTX;
7047}
7048
7049static rtx
7050altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
7051{
7052  tree arg0 = TREE_VALUE (arglist);
7053  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7054  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7055  rtx op0 = expand_normal (arg0);
7056  rtx op1 = expand_normal (arg1);
7057  rtx op2 = expand_normal (arg2);
7058  rtx pat, addr;
7059  enum machine_mode tmode = insn_data[icode].operand[0].mode;
7060  enum machine_mode mode1 = Pmode;
7061  enum machine_mode mode2 = Pmode;
7062
7063  /* Invalid arguments.  Bail out before generating bad rtl.  */
7064  if (arg0 == error_mark_node
7065      || arg1 == error_mark_node
7066      || arg2 == error_mark_node)
7067    return const0_rtx;
7068
7069  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7070    op0 = copy_to_mode_reg (tmode, op0);
7071
7072  op2 = copy_to_mode_reg (mode2, op2);
7073
7074  if (op1 == const0_rtx)
7075    {
7076      addr = gen_rtx_MEM (tmode, op2);
7077    }
7078  else
7079    {
7080      op1 = copy_to_mode_reg (mode1, op1);
7081      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7082    }
7083
7084  pat = GEN_FCN (icode) (addr, op0);
7085  if (pat)
7086    emit_insn (pat);
7087  return NULL_RTX;
7088}
7089
7090static rtx
7091rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
7092{
7093  rtx pat;
7094  tree arg0 = TREE_VALUE (arglist);
7095  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7096  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7097  rtx op0 = expand_normal (arg0);
7098  rtx op1 = expand_normal (arg1);
7099  rtx op2 = expand_normal (arg2);
7100  enum machine_mode tmode = insn_data[icode].operand[0].mode;
7101  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7102  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7103  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
7104
7105  if (icode == CODE_FOR_nothing)
7106    /* Builtin not supported on this processor.  */
7107    return 0;
7108
7109  /* If we got invalid arguments, bail out before generating bad rtl.  */
7110  if (arg0 == error_mark_node
7111      || arg1 == error_mark_node
7112      || arg2 == error_mark_node)
7113    return const0_rtx;
7114
7115  if (icode == CODE_FOR_altivec_vsldoi_v4sf
7116      || icode == CODE_FOR_altivec_vsldoi_v4si
7117      || icode == CODE_FOR_altivec_vsldoi_v8hi
7118      || icode == CODE_FOR_altivec_vsldoi_v16qi)
7119    {
7120      /* Only allow 4-bit unsigned literals.  */
7121      STRIP_NOPS (arg2);
7122      if (TREE_CODE (arg2) != INTEGER_CST
7123	  || TREE_INT_CST_LOW (arg2) & ~0xf)
7124	{
7125	  error ("argument 3 must be a 4-bit unsigned literal");
7126	  return const0_rtx;
7127	}
7128    }
7129
7130  if (target == 0
7131      || GET_MODE (target) != tmode
7132      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7133    target = gen_reg_rtx (tmode);
7134
7135  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7136    op0 = copy_to_mode_reg (mode0, op0);
7137  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7138    op1 = copy_to_mode_reg (mode1, op1);
7139  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7140    op2 = copy_to_mode_reg (mode2, op2);
7141
7142  pat = GEN_FCN (icode) (target, op0, op1, op2);
7143  if (! pat)
7144    return 0;
7145  emit_insn (pat);
7146
7147  return target;
7148}
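/* Illustration (added commentary, not part of the original source):
   the vsldoi cases above are reached by e.g.

     vector unsigned char r = vec_sld (a, b, 3);

   where the shift count must satisfy (n & ~0xf) == 0, i.e. lie in
   [0, 15]; vec_sld (a, b, 19) draws the "argument 3 must be a 4-bit
   unsigned literal" error.  */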
7149
7150/* Expand the lvx builtins.  */
7151static rtx
7152altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
7153{
7154  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7155  tree arglist = TREE_OPERAND (exp, 1);
7156  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7157  tree arg0;
7158  enum machine_mode tmode, mode0;
7159  rtx pat, op0;
7160  enum insn_code icode;
7161
7162  switch (fcode)
7163    {
7164    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
7165      icode = CODE_FOR_altivec_lvx_v16qi;
7166      break;
7167    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
7168      icode = CODE_FOR_altivec_lvx_v8hi;
7169      break;
7170    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
7171      icode = CODE_FOR_altivec_lvx_v4si;
7172      break;
7173    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
7174      icode = CODE_FOR_altivec_lvx_v4sf;
7175      break;
7176    default:
7177      *expandedp = false;
7178      return NULL_RTX;
7179    }
7180
7181  *expandedp = true;
7182
7183  arg0 = TREE_VALUE (arglist);
7184  op0 = expand_normal (arg0);
7185  tmode = insn_data[icode].operand[0].mode;
7186  mode0 = insn_data[icode].operand[1].mode;
7187
7188  if (target == 0
7189      || GET_MODE (target) != tmode
7190      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7191    target = gen_reg_rtx (tmode);
7192
7193  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7194    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7195
7196  pat = GEN_FCN (icode) (target, op0);
7197  if (! pat)
7198    return 0;
7199  emit_insn (pat);
7200  return target;
7201}
7202
7203/* Expand the stvx builtins.  */
7204static rtx
7205altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7206			   bool *expandedp)
7207{
7208  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7209  tree arglist = TREE_OPERAND (exp, 1);
7210  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7211  tree arg0, arg1;
7212  enum machine_mode mode0, mode1;
7213  rtx pat, op0, op1;
7214  enum insn_code icode;
7215
7216  switch (fcode)
7217    {
7218    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
7219      icode = CODE_FOR_altivec_stvx_v16qi;
7220      break;
7221    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
7222      icode = CODE_FOR_altivec_stvx_v8hi;
7223      break;
7224    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
7225      icode = CODE_FOR_altivec_stvx_v4si;
7226      break;
7227    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
7228      icode = CODE_FOR_altivec_stvx_v4sf;
7229      break;
7230    default:
7231      *expandedp = false;
7232      return NULL_RTX;
7233    }
7234
7235  arg0 = TREE_VALUE (arglist);
7236  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7237  op0 = expand_normal (arg0);
7238  op1 = expand_normal (arg1);
7239  mode0 = insn_data[icode].operand[0].mode;
7240  mode1 = insn_data[icode].operand[1].mode;
7241
7242  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7243    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7244  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7245    op1 = copy_to_mode_reg (mode1, op1);
7246
7247  pat = GEN_FCN (icode) (op0, op1);
7248  if (pat)
7249    emit_insn (pat);
7250
7251  *expandedp = true;
7252  return NULL_RTX;
7253}
7254
7255/* Expand the dst builtins.  */
7256static rtx
7257altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
7258			    bool *expandedp)
7259{
7260  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7261  tree arglist = TREE_OPERAND (exp, 1);
7262  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7263  tree arg0, arg1, arg2;
7264  enum machine_mode mode0, mode1, mode2;
7265  rtx pat, op0, op1, op2;
7266  struct builtin_description *d;
7267  size_t i;
7268
7269  *expandedp = false;
7270
7271  /* Handle DST variants.  */
7272  d = (struct builtin_description *) bdesc_dst;
7273  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7274    if (d->code == fcode)
7275      {
7276	arg0 = TREE_VALUE (arglist);
7277	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7278	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7279	op0 = expand_normal (arg0);
7280	op1 = expand_normal (arg1);
7281	op2 = expand_normal (arg2);
7282	mode0 = insn_data[d->icode].operand[0].mode;
7283	mode1 = insn_data[d->icode].operand[1].mode;
7284	mode2 = insn_data[d->icode].operand[2].mode;
7285
7286	/* Invalid arguments; bail out before generating bad rtl.  */
7287	if (arg0 == error_mark_node
7288	    || arg1 == error_mark_node
7289	    || arg2 == error_mark_node)
7290	  return const0_rtx;
7291
7292	*expandedp = true;
7293	STRIP_NOPS (arg2);
7294	if (TREE_CODE (arg2) != INTEGER_CST
7295	    || TREE_INT_CST_LOW (arg2) & ~0x3)
7296	  {
7297	    error ("argument to %qs must be a 2-bit unsigned literal", d->name);
7298	    return const0_rtx;
7299	  }
7300
7301	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
7302	  op0 = copy_to_mode_reg (Pmode, op0);
7303	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7304	  op1 = copy_to_mode_reg (mode1, op1);
7305
7306	pat = GEN_FCN (d->icode) (op0, op1, op2);
7307	if (pat != 0)
7308	  emit_insn (pat);
7309
7310	return NULL_RTX;
7311      }
7312
7313  return NULL_RTX;
7314}
7315
7316/* Expand vec_init builtin.  */
7317static rtx
7318altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7319{
7320  enum machine_mode tmode = TYPE_MODE (type);
7321  enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7322  int i, n_elt = GET_MODE_NUNITS (tmode);
7323  rtvec v = rtvec_alloc (n_elt);
7324
7325  gcc_assert (VECTOR_MODE_P (tmode));
7326
7327  for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7328    {
7329      rtx x = expand_normal (TREE_VALUE (arglist));
7330      RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7331    }
7332
7333  gcc_assert (arglist == NULL);
7334
7335  if (!target || !register_operand (target, tmode))
7336    target = gen_reg_rtx (tmode);
7337
7338  rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7339  return target;
7340}
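
/* For example (a sketch of front-end input; the builtin is defined in
   altivec_init_builtins below):

       __vector signed int v
	 = __builtin_vec_init_v4si (a, b, c, d);

   arrives here with four scalar arguments; each is narrowed to the
   vector's inner mode with gen_lowpart and collected into a PARALLEL
   that rs6000_expand_vector_init lowers to real vector-build code.  */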

/* Return the integer constant in ARG.  Constrain it to be in the range
   of the subparts of VEC_TYPE; issue an error if not.  */

static int
get_element_number (tree vec_type, tree arg)
{
  unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;

  if (!host_integerp (arg, 1)
      || (elt = tree_low_cst (arg, 1), elt > max))
    {
      error ("selector must be an integer constant in the range 0..%wi", max);
      return 0;
    }

  return elt;
}
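
/* For a V4SI vector, for instance, TYPE_VECTOR_SUBPARTS is 4, so the
   selector must be a literal in 0..3; a call such as
   __builtin_vec_ext_v4si (v, 5) is diagnosed with the error above,
   and element 0 is used as a fallback.  */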

/* Expand vec_set builtin.  */
static rtx
altivec_expand_vec_set_builtin (tree arglist)
{
  enum machine_mode tmode, mode1;
  tree arg0, arg1, arg2;
  int elt;
  rtx op0, op1;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  tmode = TYPE_MODE (TREE_TYPE (arg0));
  mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  gcc_assert (VECTOR_MODE_P (tmode));

  op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
  elt = get_element_number (TREE_TYPE (arg0), arg2);

  if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
    op1 = convert_modes (mode1, GET_MODE (op1), op1, true);

  op0 = force_reg (tmode, op0);
  op1 = force_reg (mode1, op1);

  rs6000_expand_vector_set (op0, op1, elt);

  return op0;
}
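
/* Sketch of a use (the builtin is defined in altivec_init_builtins
   below); this sets element 1 of V to 42 and yields the updated
   vector:

       v = __builtin_vec_set_v4si (v, 42, 1);

   ARG2 must be a constant selector; get_element_number has already
   range-checked it against the vector type.  */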

/* Expand vec_ext builtin.  */
static rtx
altivec_expand_vec_ext_builtin (tree arglist, rtx target)
{
  enum machine_mode tmode, mode0;
  tree arg0, arg1;
  int elt;
  rtx op0;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  op0 = expand_normal (arg0);
  elt = get_element_number (TREE_TYPE (arg0), arg1);

  tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  mode0 = TYPE_MODE (TREE_TYPE (arg0));
  gcc_assert (VECTOR_MODE_P (mode0));

  op0 = force_reg (mode0, op0);

  if (optimize || !target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_extract (target, op0, elt);

  return target;
}
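
/* Sketch of a use; this reads element 2 of V as a scalar int:

       int x = __builtin_vec_ext_v4si (v, 2);

   Note that when optimizing we always extract into a fresh pseudo, so
   later passes are free to allocate it.  */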

/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
      && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
    {
      *expandedp = true;
      error ("unresolved overload for Altivec builtin %qF", fndecl);
      return const0_rtx;
    }

  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments, bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments, bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
    case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
    case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
    case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
      return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);

    case ALTIVEC_BUILTIN_VEC_SET_V4SI:
    case ALTIVEC_BUILTIN_VEC_SET_V8HI:
    case ALTIVEC_BUILTIN_VEC_SET_V16QI:
    case ALTIVEC_BUILTIN_VEC_SET_V4SF:
      return altivec_expand_vec_set_builtin (arglist);

    case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
    case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
    case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
    case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
      return altivec_expand_vec_ext_builtin (arglist, target);

    default:
      /* Fall through to the table-driven expanders below.  */
      break;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
					       arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					arglist, target);
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
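
/* An example of the literal checking done above (a sketch, not from
   this file): the data-stream-stop builtin takes a 2-bit tag, so

       __builtin_altivec_dss (0);

   expands, while passing a non-constant or out-of-range tag triggers
   the "2-bit unsigned literal" error instead of generating rtl.  */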

/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};

/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}

static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT 0 | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT 0 | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
  */
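
  /* Worked example (builtin names here are illustrative): for a call
     like __builtin_spe_evcmpgts (0, a, b), form 0 selects the "all"
     variant, so we read BIT 3 (U & L, the OV position) and the result
     is 1 only when both halves compare greater-than; form 1 ("any")
     would read BIT 2 (U | L, the EQ position) instead.  */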

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will make validate_condition_mode die.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}

/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = expand_normal (arg3);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
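
/* Concretely (a sketch; the actual names come from bdesc_spe_evsel):

       e = __builtin_spe_evsel_gts (a, b, c, d);

   emits one compare of A and B into a CC register, then a single
   evsel that picks C or D per half based on that CC result, using
   gen_spe_evsel_fs instead of gen_spe_evsel for float element
   modes.  */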

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		       enum machine_mode mode ATTRIBUTE_UNUSED,
		       int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
      || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
    {
      int icode = (int) CODE_FOR_altivec_lvsr;
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      enum machine_mode mode = insn_data[icode].operand[1].mode;
      tree arg;
      rtx op, addr, pat;

      gcc_assert (TARGET_ALTIVEC);

      arg = TREE_VALUE (arglist);
      gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
      op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr = memory_address (mode, op);
      if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
	op = addr;
      else
	{
	  /* For the load case we need to negate the address.  */
	  op = gen_reg_rtx (GET_MODE (addr));
	  emit_insn (gen_rtx_SET (VOIDmode, op,
			 gen_rtx_NEG (GET_MODE (addr), addr)));
	}
      op = gen_rtx_MEM (mode, op);

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target, op);
      if (!pat)
	return 0;
      emit_insn (pat);

      return target;
    }

  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }

  gcc_assert (TARGET_ALTIVEC || TARGET_SPE);

  /* Handle simple unary operations.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_unop_builtin (d->icode, arglist, target);

  /* Handle simple binary operations.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* Handle simple ternary operations.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_ternop_builtin (d->icode, arglist, target);

  gcc_unreachable ();
}

static tree
build_opaque_vector_type (tree node, int nunits)
{
  node = copy_node (node);
  TYPE_MAIN_VARIANT (node) = node;
  return build_vector_type (node, nunits);
}
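
/* Because copy_node gives the element type a fresh main variant, the
   vector type built here is distinct from the ordinary one with the
   same mode; e.g. opaque_V2SI_type_node (exposed to users as
   __ev64_opaque__ in spe_init_builtins) is not the same tree as
   V2SI_type_node, which is what lets the front ends treat it as an
   opaque SPE type that converts freely between the vector views.  */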

static void
rs6000_init_builtins (void)
{
  V2SI_type_node = build_vector_type (intSI_type_node, 2);
  V2SF_type_node = build_vector_type (float_type_node, 2);
  V4HI_type_node = build_vector_type (intHI_type_node, 4);
  V4SI_type_node = build_vector_type (intSI_type_node, 4);
  V4SF_type_node = build_vector_type (float_type_node, 4);
  V8HI_type_node = build_vector_type (intHI_type_node, 8);
  V16QI_type_node = build_vector_type (intQI_type_node, 16);

  unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
  unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
  unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);

  opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
  opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
  opaque_V4SI_type_node = copy_node (V4SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
     types, especially in C++ land.  Similarly, 'vector pixel' is distinct from
     'vector unsigned short'.  */

  bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
  bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
  bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
  pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);

  long_integer_type_internal_node = long_integer_type_node;
  long_unsigned_type_internal_node = long_unsigned_type_node;
  intQI_type_internal_node = intQI_type_node;
  uintQI_type_internal_node = unsigned_intQI_type_node;
  intHI_type_internal_node = intHI_type_node;
  uintHI_type_internal_node = unsigned_intHI_type_node;
  intSI_type_internal_node = intSI_type_node;
  uintSI_type_internal_node = unsigned_intSI_type_node;
  float_type_internal_node = float_type_node;
  void_type_internal_node = void_type_node;

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
  bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
  bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
  pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();

#if TARGET_XCOFF
  /* AIX libm provides clog as __clog.  */
  if (built_in_decls [BUILT_IN_CLOG])
    set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
#endif
}
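
/* Once this hook has run, target source can use the type keywords
   registered above; for example (a sketch):

       __vector unsigned char perm;
       __vector __bool int mask;
       __vector __pixel px;

   and the AltiVec/SPE builtins defined below are available for
   expansion by rs6000_expand_builtin.  */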

/* Search through a set of builtins and enable the mask bits.
   DESC is an array of builtins.
   SIZE is the total number of builtins.
   START is the builtin enum at which to start.
   END is the builtin enum at which to end.  */
static void
enable_mask_for_builtins (struct builtin_description *desc, int size,
			  enum rs6000_builtins start,
			  enum rs6000_builtins end)
{
  int i;

  for (i = 0; i < size; ++i)
    if (desc[i].code == start)
      break;

  if (i == size)
    return;

  for (; i < size; ++i)
    {
      /* Flip all the bits on.  */
      desc[i].mask = target_flags;
      if (desc[i].code == end)
	break;
    }
}

static void
spe_init_builtins (void)
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_signed_char
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, signed_char_type_node,
				      endlink));
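
  /* The tree_cons chains above read as ordinary prototypes; informally
     (with __ev64_opaque__ standing for opaque_V2SI_type_node, which is
     pushed below):

	 v2si_ftype_pv2si_int:	      __ev64_opaque__ (__ev64_opaque__ *, int)
	 void_ftype_v2si_puint_char:  void (__ev64_opaque__, unsigned *, char)
	 int_ftype_void:	      int (void)
  */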

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  (*lang_hooks.decls.pushdecl)
    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
		 opaque_V2SI_type_node));

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
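
/* After this initialization, SPE code can use the builtins defined
   above, e.g. (a sketch; the literal offsets are subject to the 5-bit
   checks in spe_expand_builtin):

       __ev64_opaque__ v = __builtin_spe_evldd (p, 8);
       __builtin_spe_evstdd (v, p, 16);
*/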

static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree ftype;

  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  tree int_ftype_opaque
    = build_function_type_list (integer_type_node,
				opaque_V4SI_type_node, NULL_TREE);

  tree opaque_ftype_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, integer_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, opaque_V4SI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_opaque_opaque
    = build_function_type_list (integer_type_node,
                                integer_type_node, opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree opaque_ftype_long_pcvoid
    = build_function_type_list (opaque_V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_opaque_long_pvoid
    = build_function_type_list (void_type_node,
				opaque_V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				integer_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
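
  /* The overloaded "__builtin_vec_*" entries above all funnel through
     the opaque vector type, so generic code such as (a sketch)

	 __vector signed int v = __builtin_vec_ld (0, addr);
	 __builtin_vec_st (v, 16, addr);

     type-checks for any element type; the front end resolves the
     overload, and an unresolved one is diagnosed in
     altivec_expand_builtin.  */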

  def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;
      bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
			   && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
	mode1 = VOIDmode;
      else
	mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case VOIDmode:
	  type = int_ftype_int_opaque_opaque;
	  break;
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  if (TARGET_ALTIVEC)
    {
      tree decl;

      /* Initialize target builtin that implements
         targetm.vectorize.builtin_mask_for_load.  */

      decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
                               v16qi_ftype_long_pcvoid,
                               ALTIVEC_BUILTIN_MASK_FOR_LOAD,
                               BUILT_IN_MD, NULL,
                               tree_cons (get_identifier ("const"),
                                          NULL_TREE, NULL_TREE));
      /* Record the decl. Will be used by rs6000_builtin_mask_for_load.  */
      altivec_builtin_mask_for_load = decl;
    }

  /* Access to the vec_init patterns.  */
  ftype = build_function_type_list (V4SI_type_node, integer_type_node,
				    integer_type_node, integer_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V4SI);

  ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V8HI);

  ftype = build_function_type_list (V16QI_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V16QI);

  ftype = build_function_type_list (V4SF_type_node, float_type_node,
				    float_type_node, float_type_node,
				    float_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V4SF);

  /* Access to the vec_set patterns.  */
  ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
				    intSI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V4SI);

  ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
				    intHI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V8HI);

8710  ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
8711				    intQI_type_node,
8712				    integer_type_node, NULL_TREE);
8713  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8714	       ALTIVEC_BUILTIN_VEC_SET_V16QI);
8715
8716  ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8717				    float_type_node,
8718				    integer_type_node, NULL_TREE);
8719  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8720	       ALTIVEC_BUILTIN_VEC_SET_V4SF);
8721
8722  /* Access to the vec_extract patterns.  */
8723  ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8724				    integer_type_node, NULL_TREE);
8725  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8726	       ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8727
8728  ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8729				    integer_type_node, NULL_TREE);
8730  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8731	       ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8732
8733  ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8734				    integer_type_node, NULL_TREE);
8735  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8736	       ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8737
8738  ftype = build_function_type_list (float_type_node, V4SF_type_node,
8739				    integer_type_node, NULL_TREE);
8740  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8741	       ALTIVEC_BUILTIN_VEC_EXT_V4SF);
8742}
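
/* Editorial example (not part of the original source): with -maltivec,
   the builtins registered above can be used directly; a hypothetical
   snippet:

     vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);
     v = __builtin_vec_set_v4si (v, 42, 2);     element 2 becomes 42
     int e = __builtin_vec_ext_v4si (v, 2);     e is now 42

   They back the generic vec_init, vec_set and vec_extract expansions
   for V4SI, V8HI, V16QI and V4SF.  */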
8743
8744static void
8745rs6000_common_init_builtins (void)
8746{
8747  struct builtin_description *d;
8748  size_t i;
8749
8750  tree v4sf_ftype_v4sf_v4sf_v16qi
8751    = build_function_type_list (V4SF_type_node,
8752				V4SF_type_node, V4SF_type_node,
8753				V16QI_type_node, NULL_TREE);
8754  tree v4si_ftype_v4si_v4si_v16qi
8755    = build_function_type_list (V4SI_type_node,
8756				V4SI_type_node, V4SI_type_node,
8757				V16QI_type_node, NULL_TREE);
8758  tree v8hi_ftype_v8hi_v8hi_v16qi
8759    = build_function_type_list (V8HI_type_node,
8760				V8HI_type_node, V8HI_type_node,
8761				V16QI_type_node, NULL_TREE);
8762  tree v16qi_ftype_v16qi_v16qi_v16qi
8763    = build_function_type_list (V16QI_type_node,
8764				V16QI_type_node, V16QI_type_node,
8765				V16QI_type_node, NULL_TREE);
8766  tree v4si_ftype_int
8767    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8768  tree v8hi_ftype_int
8769    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8770  tree v16qi_ftype_int
8771    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
8772  tree v8hi_ftype_v16qi
8773    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8774  tree v4sf_ftype_v4sf
8775    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8776
8777  tree v2si_ftype_v2si_v2si
8778    = build_function_type_list (opaque_V2SI_type_node,
8779				opaque_V2SI_type_node,
8780				opaque_V2SI_type_node, NULL_TREE);
8781
8782  tree v2sf_ftype_v2sf_v2sf
8783    = build_function_type_list (opaque_V2SF_type_node,
8784				opaque_V2SF_type_node,
8785				opaque_V2SF_type_node, NULL_TREE);
8786
8787  tree v2si_ftype_int_int
8788    = build_function_type_list (opaque_V2SI_type_node,
8789				integer_type_node, integer_type_node,
8790				NULL_TREE);
8791
8792  tree opaque_ftype_opaque
8793    = build_function_type_list (opaque_V4SI_type_node,
8794				opaque_V4SI_type_node, NULL_TREE);
8795
8796  tree v2si_ftype_v2si
8797    = build_function_type_list (opaque_V2SI_type_node,
8798				opaque_V2SI_type_node, NULL_TREE);
8799
8800  tree v2sf_ftype_v2sf
8801    = build_function_type_list (opaque_V2SF_type_node,
8802				opaque_V2SF_type_node, NULL_TREE);
8803
8804  tree v2sf_ftype_v2si
8805    = build_function_type_list (opaque_V2SF_type_node,
8806				opaque_V2SI_type_node, NULL_TREE);
8807
8808  tree v2si_ftype_v2sf
8809    = build_function_type_list (opaque_V2SI_type_node,
8810				opaque_V2SF_type_node, NULL_TREE);
8811
8812  tree v2si_ftype_v2si_char
8813    = build_function_type_list (opaque_V2SI_type_node,
8814				opaque_V2SI_type_node,
8815				char_type_node, NULL_TREE);
8816
8817  tree v2si_ftype_int_char
8818    = build_function_type_list (opaque_V2SI_type_node,
8819				integer_type_node, char_type_node, NULL_TREE);
8820
8821  tree v2si_ftype_char
8822    = build_function_type_list (opaque_V2SI_type_node,
8823				char_type_node, NULL_TREE);
8824
8825  tree int_ftype_int_int
8826    = build_function_type_list (integer_type_node,
8827				integer_type_node, integer_type_node,
8828				NULL_TREE);
8829
8830  tree opaque_ftype_opaque_opaque
8831    = build_function_type_list (opaque_V4SI_type_node,
8832                                opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
8833  tree v4si_ftype_v4si_v4si
8834    = build_function_type_list (V4SI_type_node,
8835				V4SI_type_node, V4SI_type_node, NULL_TREE);
8836  tree v4sf_ftype_v4si_int
8837    = build_function_type_list (V4SF_type_node,
8838				V4SI_type_node, integer_type_node, NULL_TREE);
8839  tree v4si_ftype_v4sf_int
8840    = build_function_type_list (V4SI_type_node,
8841				V4SF_type_node, integer_type_node, NULL_TREE);
8842  tree v4si_ftype_v4si_int
8843    = build_function_type_list (V4SI_type_node,
8844				V4SI_type_node, integer_type_node, NULL_TREE);
8845  tree v8hi_ftype_v8hi_int
8846    = build_function_type_list (V8HI_type_node,
8847				V8HI_type_node, integer_type_node, NULL_TREE);
8848  tree v16qi_ftype_v16qi_int
8849    = build_function_type_list (V16QI_type_node,
8850				V16QI_type_node, integer_type_node, NULL_TREE);
8851  tree v16qi_ftype_v16qi_v16qi_int
8852    = build_function_type_list (V16QI_type_node,
8853				V16QI_type_node, V16QI_type_node,
8854				integer_type_node, NULL_TREE);
8855  tree v8hi_ftype_v8hi_v8hi_int
8856    = build_function_type_list (V8HI_type_node,
8857				V8HI_type_node, V8HI_type_node,
8858				integer_type_node, NULL_TREE);
8859  tree v4si_ftype_v4si_v4si_int
8860    = build_function_type_list (V4SI_type_node,
8861				V4SI_type_node, V4SI_type_node,
8862				integer_type_node, NULL_TREE);
8863  tree v4sf_ftype_v4sf_v4sf_int
8864    = build_function_type_list (V4SF_type_node,
8865				V4SF_type_node, V4SF_type_node,
8866				integer_type_node, NULL_TREE);
8867  tree v4sf_ftype_v4sf_v4sf
8868    = build_function_type_list (V4SF_type_node,
8869				V4SF_type_node, V4SF_type_node, NULL_TREE);
8870  tree opaque_ftype_opaque_opaque_opaque
8871    = build_function_type_list (opaque_V4SI_type_node,
8872                                opaque_V4SI_type_node, opaque_V4SI_type_node,
8873                                opaque_V4SI_type_node, NULL_TREE);
8874  tree v4sf_ftype_v4sf_v4sf_v4si
8875    = build_function_type_list (V4SF_type_node,
8876				V4SF_type_node, V4SF_type_node,
8877				V4SI_type_node, NULL_TREE);
8878  tree v4sf_ftype_v4sf_v4sf_v4sf
8879    = build_function_type_list (V4SF_type_node,
8880				V4SF_type_node, V4SF_type_node,
8881				V4SF_type_node, NULL_TREE);
8882  tree v4si_ftype_v4si_v4si_v4si
8883    = build_function_type_list (V4SI_type_node,
8884				V4SI_type_node, V4SI_type_node,
8885				V4SI_type_node, NULL_TREE);
8886  tree v8hi_ftype_v8hi_v8hi
8887    = build_function_type_list (V8HI_type_node,
8888				V8HI_type_node, V8HI_type_node, NULL_TREE);
8889  tree v8hi_ftype_v8hi_v8hi_v8hi
8890    = build_function_type_list (V8HI_type_node,
8891				V8HI_type_node, V8HI_type_node,
8892				V8HI_type_node, NULL_TREE);
8893  tree v4si_ftype_v8hi_v8hi_v4si
8894    = build_function_type_list (V4SI_type_node,
8895				V8HI_type_node, V8HI_type_node,
8896				V4SI_type_node, NULL_TREE);
8897  tree v4si_ftype_v16qi_v16qi_v4si
8898    = build_function_type_list (V4SI_type_node,
8899				V16QI_type_node, V16QI_type_node,
8900				V4SI_type_node, NULL_TREE);
8901  tree v16qi_ftype_v16qi_v16qi
8902    = build_function_type_list (V16QI_type_node,
8903				V16QI_type_node, V16QI_type_node, NULL_TREE);
8904  tree v4si_ftype_v4sf_v4sf
8905    = build_function_type_list (V4SI_type_node,
8906				V4SF_type_node, V4SF_type_node, NULL_TREE);
8907  tree v8hi_ftype_v16qi_v16qi
8908    = build_function_type_list (V8HI_type_node,
8909				V16QI_type_node, V16QI_type_node, NULL_TREE);
8910  tree v4si_ftype_v8hi_v8hi
8911    = build_function_type_list (V4SI_type_node,
8912				V8HI_type_node, V8HI_type_node, NULL_TREE);
8913  tree v8hi_ftype_v4si_v4si
8914    = build_function_type_list (V8HI_type_node,
8915				V4SI_type_node, V4SI_type_node, NULL_TREE);
8916  tree v16qi_ftype_v8hi_v8hi
8917    = build_function_type_list (V16QI_type_node,
8918				V8HI_type_node, V8HI_type_node, NULL_TREE);
8919  tree v4si_ftype_v16qi_v4si
8920    = build_function_type_list (V4SI_type_node,
8921				V16QI_type_node, V4SI_type_node, NULL_TREE);
8922  tree v4si_ftype_v16qi_v16qi
8923    = build_function_type_list (V4SI_type_node,
8924				V16QI_type_node, V16QI_type_node, NULL_TREE);
8925  tree v4si_ftype_v8hi_v4si
8926    = build_function_type_list (V4SI_type_node,
8927				V8HI_type_node, V4SI_type_node, NULL_TREE);
8928  tree v4si_ftype_v8hi
8929    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8930  tree int_ftype_v4si_v4si
8931    = build_function_type_list (integer_type_node,
8932				V4SI_type_node, V4SI_type_node, NULL_TREE);
8933  tree int_ftype_v4sf_v4sf
8934    = build_function_type_list (integer_type_node,
8935				V4SF_type_node, V4SF_type_node, NULL_TREE);
8936  tree int_ftype_v16qi_v16qi
8937    = build_function_type_list (integer_type_node,
8938				V16QI_type_node, V16QI_type_node, NULL_TREE);
8939  tree int_ftype_v8hi_v8hi
8940    = build_function_type_list (integer_type_node,
8941				V8HI_type_node, V8HI_type_node, NULL_TREE);
8942
8943  /* Add the simple ternary operators.  */
8944  d = (struct builtin_description *) bdesc_3arg;
8945  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8946    {
8947      enum machine_mode mode0, mode1, mode2, mode3;
8948      tree type;
8949      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8950			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8951
8952      if (is_overloaded)
8953	{
8954          mode0 = VOIDmode;
8955          mode1 = VOIDmode;
8956          mode2 = VOIDmode;
8957          mode3 = VOIDmode;
8958	}
8959      else
8960	{
8961          if (d->name == 0 || d->icode == CODE_FOR_nothing)
8962	    continue;
8963
8964          mode0 = insn_data[d->icode].operand[0].mode;
8965          mode1 = insn_data[d->icode].operand[1].mode;
8966          mode2 = insn_data[d->icode].operand[2].mode;
8967          mode3 = insn_data[d->icode].operand[3].mode;
8968	}
8969
8970      /* When all four are of the same mode.  */
8971      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8972	{
8973	  switch (mode0)
8974	    {
8975	    case VOIDmode:
8976	      type = opaque_ftype_opaque_opaque_opaque;
8977	      break;
8978	    case V4SImode:
8979	      type = v4si_ftype_v4si_v4si_v4si;
8980	      break;
8981	    case V4SFmode:
8982	      type = v4sf_ftype_v4sf_v4sf_v4sf;
8983	      break;
8984	    case V8HImode:
8985	      type = v8hi_ftype_v8hi_v8hi_v8hi;
8986	      break;
8987	    case V16QImode:
8988	      type = v16qi_ftype_v16qi_v16qi_v16qi;
8989	      break;
8990	    default:
8991	      gcc_unreachable ();
8992	    }
8993	}
8994      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
8995	{
8996	  switch (mode0)
8997	    {
8998	    case V4SImode:
8999	      type = v4si_ftype_v4si_v4si_v16qi;
9000	      break;
9001	    case V4SFmode:
9002	      type = v4sf_ftype_v4sf_v4sf_v16qi;
9003	      break;
9004	    case V8HImode:
9005	      type = v8hi_ftype_v8hi_v8hi_v16qi;
9006	      break;
9007	    case V16QImode:
9008	      type = v16qi_ftype_v16qi_v16qi_v16qi;
9009	      break;
9010	    default:
9011	      gcc_unreachable ();
9012	    }
9013	}
9014      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
9015	       && mode3 == V4SImode)
9016	type = v4si_ftype_v16qi_v16qi_v4si;
9017      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
9018	       && mode3 == V4SImode)
9019	type = v4si_ftype_v8hi_v8hi_v4si;
9020      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
9021	       && mode3 == V4SImode)
9022	type = v4sf_ftype_v4sf_v4sf_v4si;
9023
9024      /* vchar, vchar, vchar, 4 bit literal.  */
9025      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9026	       && mode3 == QImode)
9027	type = v16qi_ftype_v16qi_v16qi_int;
9028
9029      /* vshort, vshort, vshort, 4 bit literal.  */
9030      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9031	       && mode3 == QImode)
9032	type = v8hi_ftype_v8hi_v8hi_int;
9033
9034      /* vint, vint, vint, 4 bit literal.  */
9035      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9036	       && mode3 == QImode)
9037	type = v4si_ftype_v4si_v4si_int;
9038
9039      /* vfloat, vfloat, vfloat, 4 bit literal.  */
9040      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9041	       && mode3 == QImode)
9042	type = v4sf_ftype_v4sf_v4sf_int;
9043
9044      else
9045	gcc_unreachable ();
9046
9047      def_builtin (d->mask, d->name, type, d->code);
9048    }
9049
9050  /* Add the simple binary operators.  */
9051  d = (struct builtin_description *) bdesc_2arg;
9052  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9053    {
9054      enum machine_mode mode0, mode1, mode2;
9055      tree type;
9056      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9057			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9058
9059      if (is_overloaded)
9060	{
9061	  mode0 = VOIDmode;
9062	  mode1 = VOIDmode;
9063	  mode2 = VOIDmode;
9064	}
9065      else
9066	{
9067          if (d->name == 0 || d->icode == CODE_FOR_nothing)
9068	    continue;
9069
9070          mode0 = insn_data[d->icode].operand[0].mode;
9071          mode1 = insn_data[d->icode].operand[1].mode;
9072          mode2 = insn_data[d->icode].operand[2].mode;
9073	}
9074
9075      /* When all three operands are of the same mode.  */
9076      if (mode0 == mode1 && mode1 == mode2)
9077	{
9078	  switch (mode0)
9079	    {
9080	    case VOIDmode:
9081	      type = opaque_ftype_opaque_opaque;
9082	      break;
9083	    case V4SFmode:
9084	      type = v4sf_ftype_v4sf_v4sf;
9085	      break;
9086	    case V4SImode:
9087	      type = v4si_ftype_v4si_v4si;
9088	      break;
9089	    case V16QImode:
9090	      type = v16qi_ftype_v16qi_v16qi;
9091	      break;
9092	    case V8HImode:
9093	      type = v8hi_ftype_v8hi_v8hi;
9094	      break;
9095	    case V2SImode:
9096	      type = v2si_ftype_v2si_v2si;
9097	      break;
9098	    case V2SFmode:
9099	      type = v2sf_ftype_v2sf_v2sf;
9100	      break;
9101	    case SImode:
9102	      type = int_ftype_int_int;
9103	      break;
9104	    default:
9105	      gcc_unreachable ();
9106	    }
9107	}
9108
9109      /* A few other combos we really don't want to do manually.  */
9110
9111      /* vint, vfloat, vfloat.  */
9112      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9113	type = v4si_ftype_v4sf_v4sf;
9114
9115      /* vshort, vchar, vchar.  */
9116      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9117	type = v8hi_ftype_v16qi_v16qi;
9118
9119      /* vint, vshort, vshort.  */
9120      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9121	type = v4si_ftype_v8hi_v8hi;
9122
9123      /* vshort, vint, vint.  */
9124      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9125	type = v8hi_ftype_v4si_v4si;
9126
9127      /* vchar, vshort, vshort.  */
9128      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9129	type = v16qi_ftype_v8hi_v8hi;
9130
9131      /* vint, vchar, vint.  */
9132      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9133	type = v4si_ftype_v16qi_v4si;
9134
9135      /* vint, vchar, vchar.  */
9136      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9137	type = v4si_ftype_v16qi_v16qi;
9138
9139      /* vint, vshort, vint.  */
9140      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9141	type = v4si_ftype_v8hi_v4si;
9142
9143      /* vint, vint, 5 bit literal.  */
9144      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
9145	type = v4si_ftype_v4si_int;
9146
9147      /* vshort, vshort, 5 bit literal.  */
9148      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
9149	type = v8hi_ftype_v8hi_int;
9150
9151      /* vchar, vchar, 5 bit literal.  */
9152      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
9153	type = v16qi_ftype_v16qi_int;
9154
9155      /* vfloat, vint, 5 bit literal.  */
9156      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
9157	type = v4sf_ftype_v4si_int;
9158
9159      /* vint, vfloat, 5 bit literal.  */
9160      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
9161	type = v4si_ftype_v4sf_int;
9162
9163      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9164	type = v2si_ftype_int_int;
9165
9166      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9167	type = v2si_ftype_v2si_char;
9168
9169      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9170	type = v2si_ftype_int_char;
9171
9172      else
9173	{
9174	  /* int, x, x.  */
9175	  gcc_assert (mode0 == SImode);
9176	  switch (mode1)
9177	    {
9178	    case V4SImode:
9179	      type = int_ftype_v4si_v4si;
9180	      break;
9181	    case V4SFmode:
9182	      type = int_ftype_v4sf_v4sf;
9183	      break;
9184	    case V16QImode:
9185	      type = int_ftype_v16qi_v16qi;
9186	      break;
9187	    case V8HImode:
9188	      type = int_ftype_v8hi_v8hi;
9189	      break;
9190	    default:
9191	      gcc_unreachable ();
9192	    }
9193	}
9194
9195      def_builtin (d->mask, d->name, type, d->code);
9196    }
9197
9198  /* Add the simple unary operators.  */
9199  d = (struct builtin_description *) bdesc_1arg;
9200  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9201    {
9202      enum machine_mode mode0, mode1;
9203      tree type;
9204      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9205			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9206
9207      if (is_overloaded)
9208        {
9209          mode0 = VOIDmode;
9210          mode1 = VOIDmode;
9211        }
9212      else
9213        {
9214          if (d->name == 0 || d->icode == CODE_FOR_nothing)
9215	    continue;
9216
9217          mode0 = insn_data[d->icode].operand[0].mode;
9218          mode1 = insn_data[d->icode].operand[1].mode;
9219        }
9220
9221      if (mode0 == V4SImode && mode1 == QImode)
9222	type = v4si_ftype_int;
9223      else if (mode0 == V8HImode && mode1 == QImode)
9224	type = v8hi_ftype_int;
9225      else if (mode0 == V16QImode && mode1 == QImode)
9226	type = v16qi_ftype_int;
9227      else if (mode0 == VOIDmode && mode1 == VOIDmode)
9228	type = opaque_ftype_opaque;
9229      else if (mode0 == V4SFmode && mode1 == V4SFmode)
9230	type = v4sf_ftype_v4sf;
9231      else if (mode0 == V8HImode && mode1 == V16QImode)
9232	type = v8hi_ftype_v16qi;
9233      else if (mode0 == V4SImode && mode1 == V8HImode)
9234	type = v4si_ftype_v8hi;
9235      else if (mode0 == V2SImode && mode1 == V2SImode)
9236	type = v2si_ftype_v2si;
9237      else if (mode0 == V2SFmode && mode1 == V2SFmode)
9238	type = v2sf_ftype_v2sf;
9239      else if (mode0 == V2SFmode && mode1 == V2SImode)
9240	type = v2sf_ftype_v2si;
9241      else if (mode0 == V2SImode && mode1 == V2SFmode)
9242	type = v2si_ftype_v2sf;
9243      else if (mode0 == V2SImode && mode1 == QImode)
9244	type = v2si_ftype_char;
9245      else
9246	gcc_unreachable ();
9247
9248      def_builtin (d->mask, d->name, type, d->code);
9249    }
9250}
9251
9252static void
9253rs6000_init_libfuncs (void)
9254{
9255  if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9256      && !TARGET_POWER2 && !TARGET_POWERPC)
9257    {
9258      /* AIX library routines for float->int conversion.  */
9259      set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9260      set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9261      set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9262      set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9263    }
9264
9265  if (!TARGET_IEEEQUAD)
9266    /* AIX/Darwin/64-bit Linux quad floating point routines.  */
9267    if (!TARGET_XL_COMPAT)
9268      {
9269	set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9270	set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9271	set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9272	set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9273
9274	if (TARGET_SOFT_FLOAT)
9275	  {
9276	    set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
9277	    set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
9278	    set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
9279	    set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
9280	    set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
9281	    set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
9282	    set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
9283	    set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
9284
9285	    set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
9286	    set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
9287	    set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
9288	    set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
9289	    set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
9290	    set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
9291	    set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
9292	    set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
9293	  }
9294      }
9295    else
9296      {
9297	set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9298	set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9299	set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9300	set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9301      }
9302  else
9303    {
9304      /* 32-bit SVR4 quad floating point routines.  */
9305
9306      set_optab_libfunc (add_optab, TFmode, "_q_add");
9307      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9308      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9309      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9310      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9311      if (TARGET_PPC_GPOPT || TARGET_POWER2)
9312	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9313
9314      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9315      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9316      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9317      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9318      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9319      set_optab_libfunc (le_optab, TFmode, "_q_fle");
9320
9321      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9322      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9323      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9324      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9325      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9326      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9327      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
9328      set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
9329    }
9330}
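
/* Editorial note (not part of the original source): a sketch of the
   effect of the tables above.  On a !TARGET_IEEEQUAD target, IBM long
   double addition such as

     long double f (long double a, long double b) { return a + b; }

   becomes a call to __gcc_qadd (or _xlqadd under -mxl-compat), while
   a TARGET_IEEEQUAD (32-bit SVR4) target calls _q_add instead.  */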
9331
9332
9333/* Expand a block clear operation, and return 1 if successful.  Return 0
9334   if we should let the compiler generate normal code.
9335
9336   operands[0] is the destination
9337   operands[1] is the length
9338   operands[3] is the alignment */
9339
9340int
9341expand_block_clear (rtx operands[])
9342{
9343  rtx orig_dest = operands[0];
9344  rtx bytes_rtx	= operands[1];
9345  rtx align_rtx = operands[3];
9346  bool constp	= (GET_CODE (bytes_rtx) == CONST_INT);
9347  HOST_WIDE_INT align;
9348  HOST_WIDE_INT bytes;
9349  int offset;
9350  int clear_bytes;
9351  int clear_step;
9352
9353  /* If this is not a fixed size clear, just call memset.  */
9354  if (! constp)
9355    return 0;
9356
9357  /* This must be a fixed size alignment.  */
9358  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9359  align = INTVAL (align_rtx) * BITS_PER_UNIT;
9360
9361  /* Anything to clear? */
9362  bytes = INTVAL (bytes_rtx);
9363  if (bytes <= 0)
9364    return 1;
9365
9366  /* Use the builtin memset after a point, to avoid huge code bloat.
9367     When optimize_size, avoid any significant code bloat; calling
9368     memset is about 4 instructions, so allow for one instruction to
9369     load zero and three to do clearing.  */
9370  if (TARGET_ALTIVEC && align >= 128)
9371    clear_step = 16;
9372  else if (TARGET_POWERPC64 && align >= 32)
9373    clear_step = 8;
9374  else
9375    clear_step = 4;
9376
9377  if (optimize_size && bytes > 3 * clear_step)
9378    return 0;
9379  if (! optimize_size && bytes > 8 * clear_step)
9380    return 0;
9381
9382  for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9383    {
9384      enum machine_mode mode = BLKmode;
9385      rtx dest;
9386
9387      if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9388	{
9389	  clear_bytes = 16;
9390	  mode = V4SImode;
9391	}
9392      else if (bytes >= 8 && TARGET_POWERPC64
9393	  /* 64-bit loads and stores require word-aligned
9394	     displacements.  */
9395	  && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9396	{
9397	  clear_bytes = 8;
9398	  mode = DImode;
9399	}
9400      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9401	{			/* move 4 bytes */
9402	  clear_bytes = 4;
9403	  mode = SImode;
9404	}
9405      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9406	{			/* move 2 bytes */
9407	  clear_bytes = 2;
9408	  mode = HImode;
9409	}
9410      else /* move 1 byte at a time */
9411	{
9412	  clear_bytes = 1;
9413	  mode = QImode;
9414	}
9415
9416      dest = adjust_address (orig_dest, mode, offset);
9417
9418      emit_move_insn (dest, CONST0_RTX (mode));
9419    }
9420
9421  return 1;
9422}
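
/* Editorial example (not part of the original source): clearing 23
   bytes at a 128-bit-aligned destination on an AltiVec target emits a
   16-byte V4SImode store, then SImode, HImode and QImode stores, at
   offsets 0, 16, 20 and 22 respectively; 23 bytes is under the
   8 * clear_step limit, so the expander does not punt to memset.  */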
9423
9424
9425/* Expand a block move operation, and return 1 if successful.  Return 0
9426   if we should let the compiler generate normal code.
9427
9428   operands[0] is the destination
9429   operands[1] is the source
9430   operands[2] is the length
9431   operands[3] is the alignment */
9432
9433#define MAX_MOVE_REG 4
9434
9435int
9436expand_block_move (rtx operands[])
9437{
9438  rtx orig_dest = operands[0];
9439  rtx orig_src	= operands[1];
9440  rtx bytes_rtx	= operands[2];
9441  rtx align_rtx = operands[3];
9442  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
9443  int align;
9444  int bytes;
9445  int offset;
9446  int move_bytes;
9447  rtx stores[MAX_MOVE_REG];
9448  int num_reg = 0;
9449
9450  /* If this is not a fixed size move, just call memcpy.  */
9451  if (! constp)
9452    return 0;
9453
9454  /* This must be a fixed size alignment.  */
9455  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
9456  align = INTVAL (align_rtx) * BITS_PER_UNIT;
9457
9458  /* Anything to move? */
9459  bytes = INTVAL (bytes_rtx);
9460  if (bytes <= 0)
9461    return 1;
9462
9463  /* store_one_arg depends on expand_block_move to handle at least the size of
9464     reg_parm_stack_space.  */
9465  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
9466    return 0;
9467
9468  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
9469    {
9470      union {
9471	rtx (*movmemsi) (rtx, rtx, rtx, rtx);
9472	rtx (*mov) (rtx, rtx);
9473      } gen_func;
9474      enum machine_mode mode = BLKmode;
9475      rtx src, dest;
9476
9477      /* Altivec first, since it will be faster than a string move
9478	 when it applies, and usually not significantly larger.  */
9479      if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9480	{
9481	  move_bytes = 16;
9482	  mode = V4SImode;
9483	  gen_func.mov = gen_movv4si;
9484	}
9485      else if (TARGET_STRING
9486	  && bytes > 24		/* move up to 32 bytes at a time */
9487	  && ! fixed_regs[5]
9488	  && ! fixed_regs[6]
9489	  && ! fixed_regs[7]
9490	  && ! fixed_regs[8]
9491	  && ! fixed_regs[9]
9492	  && ! fixed_regs[10]
9493	  && ! fixed_regs[11]
9494	  && ! fixed_regs[12])
9495	{
9496	  move_bytes = (bytes > 32) ? 32 : bytes;
9497	  gen_func.movmemsi = gen_movmemsi_8reg;
9498	}
9499      else if (TARGET_STRING
9500	       && bytes > 16	/* move up to 24 bytes at a time */
9501	       && ! fixed_regs[5]
9502	       && ! fixed_regs[6]
9503	       && ! fixed_regs[7]
9504	       && ! fixed_regs[8]
9505	       && ! fixed_regs[9]
9506	       && ! fixed_regs[10])
9507	{
9508	  move_bytes = (bytes > 24) ? 24 : bytes;
9509	  gen_func.movmemsi = gen_movmemsi_6reg;
9510	}
9511      else if (TARGET_STRING
9512	       && bytes > 8	/* move up to 16 bytes at a time */
9513	       && ! fixed_regs[5]
9514	       && ! fixed_regs[6]
9515	       && ! fixed_regs[7]
9516	       && ! fixed_regs[8])
9517	{
9518	  move_bytes = (bytes > 16) ? 16 : bytes;
9519	  gen_func.movmemsi = gen_movmemsi_4reg;
9520	}
9521      else if (bytes >= 8 && TARGET_POWERPC64
9522	       /* 64-bit loads and stores require word-aligned
9523		  displacements.  */
9524	       && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
9525	{
9526	  move_bytes = 8;
9527	  mode = DImode;
9528	  gen_func.mov = gen_movdi;
9529	}
9530      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9531	{			/* move up to 8 bytes at a time */
9532	  move_bytes = (bytes > 8) ? 8 : bytes;
9533	  gen_func.movmemsi = gen_movmemsi_2reg;
9534	}
9535      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
9536	{			/* move 4 bytes */
9537	  move_bytes = 4;
9538	  mode = SImode;
9539	  gen_func.mov = gen_movsi;
9540	}
9541      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
9542	{			/* move 2 bytes */
9543	  move_bytes = 2;
9544	  mode = HImode;
9545	  gen_func.mov = gen_movhi;
9546	}
9547      else if (TARGET_STRING && bytes > 1)
9548	{			/* move up to 4 bytes at a time */
9549	  move_bytes = (bytes > 4) ? 4 : bytes;
9550	  gen_func.movmemsi = gen_movmemsi_1reg;
9551	}
9552      else /* move 1 byte at a time */
9553	{
9554	  move_bytes = 1;
9555	  mode = QImode;
9556	  gen_func.mov = gen_movqi;
9557	}
9558
9559      src = adjust_address (orig_src, mode, offset);
9560      dest = adjust_address (orig_dest, mode, offset);
9561
9562      if (mode != BLKmode)
9563	{
9564	  rtx tmp_reg = gen_reg_rtx (mode);
9565
9566	  emit_insn ((*gen_func.mov) (tmp_reg, src));
9567	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
9568	}
9569
9570      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9571	{
9572	  int i;
9573	  for (i = 0; i < num_reg; i++)
9574	    emit_insn (stores[i]);
9575	  num_reg = 0;
9576	}
9577
9578      if (mode == BLKmode)
9579	{
9580	  /* Move the address into scratch registers.  The movmemsi
9581	     patterns require zero offset.  */
9582	  if (!REG_P (XEXP (src, 0)))
9583	    {
9584	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9585	      src = replace_equiv_address (src, src_reg);
9586	    }
9587	  set_mem_size (src, GEN_INT (move_bytes));
9588
9589	  if (!REG_P (XEXP (dest, 0)))
9590	    {
9591	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9592	      dest = replace_equiv_address (dest, dest_reg);
9593	    }
9594	  set_mem_size (dest, GEN_INT (move_bytes));
9595
9596	  emit_insn ((*gen_func.movmemsi) (dest, src,
9597					   GEN_INT (move_bytes & 31),
9598					   align_rtx));
9599	}
9600    }
9601
9602  return 1;
9603}
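
/* Editorial example (not part of the original source): a 10-byte copy
   with 32-bit alignment on a 32-bit target without TARGET_STRING
   splits into two SImode moves plus one HImode move.  Each load is
   emitted at once while its store is buffered in stores[], so up to
   MAX_MOVE_REG loads issue back to back before their stores, helping
   to hide load latency.  */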
9604
9605
9606/* Return a string to perform a load_multiple operation.
9607   operands[0] is the vector.
9608   operands[1] is the source address.
9609   operands[2] is the first destination register.  */
9610
9611const char *
9612rs6000_output_load_multiple (rtx operands[3])
9613{
9614  /* We have to handle the case where the pseudo used to contain the address
9615     is assigned to one of the output registers.  */
9616  int i, j;
9617  int words = XVECLEN (operands[0], 0);
9618  rtx xop[10];
9619
9620  if (XVECLEN (operands[0], 0) == 1)
9621    return "{l|lwz} %2,0(%1)";
9622
9623  for (i = 0; i < words; i++)
9624    if (refers_to_regno_p (REGNO (operands[2]) + i,
9625			   REGNO (operands[2]) + i + 1, operands[1], 0))
9626      {
9627	if (i == words-1)
9628	  {
9629	    xop[0] = GEN_INT (4 * (words-1));
9630	    xop[1] = operands[1];
9631	    xop[2] = operands[2];
9632	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9633	    return "";
9634	  }
9635	else if (i == 0)
9636	  {
9637	    xop[0] = GEN_INT (4 * (words-1));
9638	    xop[1] = operands[1];
9639	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9640	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9641	    return "";
9642	  }
9643	else
9644	  {
9645	    for (j = 0; j < words; j++)
9646	      if (j != i)
9647		{
9648		  xop[0] = GEN_INT (j * 4);
9649		  xop[1] = operands[1];
9650		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9651		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9652		}
9653	    xop[0] = GEN_INT (i * 4);
9654	    xop[1] = operands[1];
9655	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9656	    return "";
9657	  }
9658      }
9659
9660  return "{lsi|lswi} %2,%1,%N0";
9661}
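
/* Editorial example (not part of the original source): if the base
   address register is also one of the destination registers, a plain
   lswi would clobber the address before all words are loaded.  The
   code above therefore loads every other word first, with lswi or
   individual lwz insns, and loads the word destined for the base
   register last.  */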
9662
9663
9664/* A validation routine: say whether CODE, a condition code, and MODE
9665   match.  The other alternatives either don't make sense or should
9666   never be generated.  */
9667
9668void
9669validate_condition_mode (enum rtx_code code, enum machine_mode mode)
9670{
9671  gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9672	       || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9673	      && GET_MODE_CLASS (mode) == MODE_CC);
9674
9675  /* These don't make sense.  */
9676  gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9677	      || mode != CCUNSmode);
9678
9679  gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9680	      || mode == CCUNSmode);
9681
9682  gcc_assert (mode == CCFPmode
9683	      || (code != ORDERED && code != UNORDERED
9684		  && code != UNEQ && code != LTGT
9685		  && code != UNGT && code != UNLT
9686		  && code != UNGE && code != UNLE));
9687
9688  /* These should never be generated except for
9689     flag_finite_math_only.  */
9690  gcc_assert (mode != CCFPmode
9691	      || flag_finite_math_only
9692	      || (code != LE && code != GE
9693		  && code != UNEQ && code != LTGT
9694		  && code != UNGT && code != UNLT));
9695
9696  /* These are invalid; the information is not there.  */
9697  gcc_assert (mode != CCEQmode || code == EQ || code == NE);
9698}
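
/* Editorial note (not part of the original source): for instance, GTU
   is only meaningful on a CCUNSmode register, so pairing (gtu ...)
   with plain CCmode trips the second assertion above, and LE against
   CCFPmode is rejected unless flag_finite_math_only is set.  */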
9699
9700
9701/* Return 1 if ANDOP is a mask that has no bits on that are not in the
9702   mask required to convert the result of a rotate insn into a shift
9703   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
9704
9705int
9706includes_lshift_p (rtx shiftop, rtx andop)
9707{
9708  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9709
9710  shift_mask <<= INTVAL (shiftop);
9711
9712  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9713}
9714
9715/* Similar, but for right shift.  */
9716
9717int
9718includes_rshift_p (rtx shiftop, rtx andop)
9719{
9720  unsigned HOST_WIDE_INT shift_mask = 0xffffffff;
9721
9722  shift_mask >>= INTVAL (shiftop);
9723
9724  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9725}
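
/* Editorial example (not part of the original source): with SHIFTOP 4,
   ANDOP 0xfffffff0 satisfies includes_lshift_p, so a rotate-left-by-4
   followed by that mask can be emitted as a single shift left (slwi);
   symmetrically, ANDOP 0x0fffffff satisfies includes_rshift_p and
   yields a plain shift right (srwi).  */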
9726
9727/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9728   to perform a left shift.  It must have exactly SHIFTOP least
9729   significant 0's, then one or more 1's, then zero or more 0's.  */
9730
9731int
9732includes_rldic_lshift_p (rtx shiftop, rtx andop)
9733{
9734  if (GET_CODE (andop) == CONST_INT)
9735    {
9736      HOST_WIDE_INT c, lsb, shift_mask;
9737
9738      c = INTVAL (andop);
9739      if (c == 0 || c == ~0)
9740	return 0;
9741
9742      shift_mask = ~0;
9743      shift_mask <<= INTVAL (shiftop);
9744
9745      /* Find the least significant one bit.  */
9746      lsb = c & -c;
9747
9748      /* It must coincide with the LSB of the shift mask.  */
9749      if (-lsb != shift_mask)
9750	return 0;
9751
9752      /* Invert to look for the next transition (if any).  */
9753      c = ~c;
9754
9755      /* Remove the low group of ones (originally low group of zeros).  */
9756      c &= -lsb;
9757
9758      /* Again find the lsb, and check we have all 1's above.  */
9759      lsb = c & -c;
9760      return c == -lsb;
9761    }
9762  else if (GET_CODE (andop) == CONST_DOUBLE
9763	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9764    {
9765      HOST_WIDE_INT low, high, lsb;
9766      HOST_WIDE_INT shift_mask_low, shift_mask_high;
9767
9768      low = CONST_DOUBLE_LOW (andop);
9769      if (HOST_BITS_PER_WIDE_INT < 64)
9770	high = CONST_DOUBLE_HIGH (andop);
9771
9772      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9773	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9774	return 0;
9775
9776      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9777	{
9778	  shift_mask_high = ~0;
9779	  if (INTVAL (shiftop) > 32)
9780	    shift_mask_high <<= INTVAL (shiftop) - 32;
9781
9782	  lsb = high & -high;
9783
9784	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9785	    return 0;
9786
9787	  high = ~high;
9788	  high &= -lsb;
9789
9790	  lsb = high & -high;
9791	  return high == -lsb;
9792	}
9793
9794      shift_mask_low = ~0;
9795      shift_mask_low <<= INTVAL (shiftop);
9796
9797      lsb = low & -low;
9798
9799      if (-lsb != shift_mask_low)
9800	return 0;
9801
9802      if (HOST_BITS_PER_WIDE_INT < 64)
9803	high = ~high;
9804      low = ~low;
9805      low &= -lsb;
9806
9807      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9808	{
9809	  lsb = high & -high;
9810	  return high == -lsb;
9811	}
9812
9813      lsb = low & -low;
9814      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9815    }
9816  else
9817    return 0;
9818}
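
/* Editorial example (not part of the original source): SHIFTOP 8 with
   ANDOP 0x0000ff00 passes the CONST_INT test above.  The idiom
   c & -c isolates the least significant set bit (0x100 here), which
   must sit exactly at the low edge of ~0 << 8; inverting c and
   stripping that group must then leave a single all-ones run up to
   the top bit, which 0x0000ff00 does.  */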
9819
9820/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9821   to perform a left shift.  It must have SHIFTOP or more least
9822   significant 0's, with the remainder of the word 1's.  */
9823
9824int
9825includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9826{
9827  if (GET_CODE (andop) == CONST_INT)
9828    {
9829      HOST_WIDE_INT c, lsb, shift_mask;
9830
9831      shift_mask = ~0;
9832      shift_mask <<= INTVAL (shiftop);
9833      c = INTVAL (andop);
9834
9835      /* Find the least significant one bit.  */
9836      lsb = c & -c;
9837
9838      /* It must be covered by the shift mask.
9839	 This test also rejects c == 0.  */
9840      if ((lsb & shift_mask) == 0)
9841	return 0;
9842
9843      /* Check we have all 1's above the transition, and reject all 1's.  */
9844      return c == -lsb && lsb != 1;
9845    }
9846  else if (GET_CODE (andop) == CONST_DOUBLE
9847	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9848    {
9849      HOST_WIDE_INT low, lsb, shift_mask_low;
9850
9851      low = CONST_DOUBLE_LOW (andop);
9852
9853      if (HOST_BITS_PER_WIDE_INT < 64)
9854	{
9855	  HOST_WIDE_INT high, shift_mask_high;
9856
9857	  high = CONST_DOUBLE_HIGH (andop);
9858
9859	  if (low == 0)
9860	    {
9861	      shift_mask_high = ~0;
9862	      if (INTVAL (shiftop) > 32)
9863		shift_mask_high <<= INTVAL (shiftop) - 32;
9864
9865	      lsb = high & -high;
9866
9867	      if ((lsb & shift_mask_high) == 0)
9868		return 0;
9869
9870	      return high == -lsb;
9871	    }
9872	  if (high != ~0)
9873	    return 0;
9874	}
9875
9876      shift_mask_low = ~0;
9877      shift_mask_low <<= INTVAL (shiftop);
9878
9879      lsb = low & -low;
9880
9881      if ((lsb & shift_mask_low) == 0)
9882	return 0;
9883
9884      return low == -lsb && lsb != 1;
9885    }
9886  else
9887    return 0;
9888}
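
/* Editorial example (not part of the original source): an ANDOP of
   ~ (HOST_WIDE_INT) 0xff, i.e. ones from bit 8 up to the MSB, passes
   for any SHIFTOP up to 8, whereas 0x0000ff00 fails because an rldicr
   mask must extend all the way to the most significant bit.  */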
9889
9890/* Return 1 if the operands will generate valid arguments for an rlwimi
9891instruction doing an insert with right shift in 64-bit mode.  The mask may
9892not start on the first bit or stop on the last bit because the wrap-around
9893effects of the instruction do not correspond to the semantics of the RTL insn.  */
9894
9895int
9896insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9897{
9898  if (INTVAL (startop) > 32
9899      && INTVAL (startop) < 64
9900      && INTVAL (sizeop) > 1
9901      && INTVAL (sizeop) + INTVAL (startop) < 64
9902      && INTVAL (shiftop) > 0
9903      && INTVAL (sizeop) + INTVAL (shiftop) < 32
9904      && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9905    return 1;
9906
9907  return 0;
9908}
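
/* Editorial example (not part of the original source): SIZEOP 16,
   STARTOP 40 and SHIFTOP 8 satisfy every test above, so the insert
   can be done with rlwimi; a field touching the first or the last bit
   of the doubleword is rejected, per the comment before the
   function.  */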
9909
9910/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
9911   for lfq and stfq insns iff the registers are hard registers.  */
9912
9913int
9914registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9915{
9916  /* We might have been passed a SUBREG.  */
9917  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9918    return 0;
9919
9920  /* We might have been passed non floating point registers.  */
9921  if (!FP_REGNO_P (REGNO (reg1))
9922      || !FP_REGNO_P (REGNO (reg2)))
9923    return 0;
9924
9925  return (REGNO (reg1) == REGNO (reg2) - 1);
9926}
9927
9928/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9929   addr1 and addr2 must be in consecutive memory locations
9930   (addr2 == addr1 + 8).  */
9931
9932int
9933mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9934{
9935  rtx addr1, addr2;
9936  unsigned int reg1, reg2;
9937  int offset1, offset2;
9938
9939  /* The mems cannot be volatile.  */
9940  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9941    return 0;
9942
9943  addr1 = XEXP (mem1, 0);
9944  addr2 = XEXP (mem2, 0);
9945
9946  /* Extract an offset (if used) from the first addr.  */
9947  if (GET_CODE (addr1) == PLUS)
9948    {
9949      /* If not a REG, return zero.  */
9950      if (GET_CODE (XEXP (addr1, 0)) != REG)
9951	return 0;
9952      else
9953	{
9954	  reg1 = REGNO (XEXP (addr1, 0));
9955	  /* The offset must be constant!  */
9956	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9957	    return 0;
9958	  offset1 = INTVAL (XEXP (addr1, 1));
9959	}
9960    }
9961  else if (GET_CODE (addr1) != REG)
9962    return 0;
9963  else
9964    {
9965      reg1 = REGNO (addr1);
9966      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9967      offset1 = 0;
9968    }
9969
9970  /* And now for the second addr.  */
9971  if (GET_CODE (addr2) == PLUS)
9972    {
9973      /* If not a REG, return zero.  */
9974      if (GET_CODE (XEXP (addr2, 0)) != REG)
9975	return 0;
9976      else
9977	{
9978	  reg2 = REGNO (XEXP (addr2, 0));
9979	  /* The offset must be constant. */
9980	  if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9981	    return 0;
9982	  offset2 = INTVAL (XEXP (addr2, 1));
9983	}
9984    }
9985  else if (GET_CODE (addr2) != REG)
9986    return 0;
9987  else
9988    {
9989      reg2 = REGNO (addr2);
9990      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
9991      offset2 = 0;
9992    }
9993
9994  /* Both of these must have the same base register.  */
9995  if (reg1 != reg2)
9996    return 0;
9997
9998  /* The offset for the second addr must be 8 more than the first addr.  */
9999  if (offset2 != offset1 + 8)
10000    return 0;
10001
10002  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
10003     instructions.  */
10004  return 1;
10005}
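
/* Editorial example (not part of the original source): a MEM at
   (plus (reg 9) (const_int 0)) paired with one at
   (plus (reg 9) (const_int 8)) qualifies: both share base register
   r9 and the offsets differ by exactly 8, so the two DFmode accesses
   can merge into a single lfq or stfq.  */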
10006
10007/* Return the register class of a scratch register needed to copy IN into
10008   or out of a register in CLASS in MODE.  If it can be done directly,
10009   NO_REGS is returned.  */
10010
10011enum reg_class
10012rs6000_secondary_reload_class (enum reg_class class,
10013			       enum machine_mode mode ATTRIBUTE_UNUSED,
10014			       rtx in)
10015{
10016  int regno;
10017
10018  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10019#if TARGET_MACHO
10020		     && MACHOPIC_INDIRECT
10021#endif
10022		     ))
10023    {
10024      /* We cannot copy a symbolic operand directly into anything
10025	 other than BASE_REGS for TARGET_ELF.  So indicate that a
10026	 register from BASE_REGS is needed as an intermediate
10027	 register.
10028
10029	 On Darwin, pic addresses require a load from memory, which
10030	 needs a base register.  */
10031      if (class != BASE_REGS
10032	  && (GET_CODE (in) == SYMBOL_REF
10033	      || GET_CODE (in) == HIGH
10034	      || GET_CODE (in) == LABEL_REF
10035	      || GET_CODE (in) == CONST))
10036	return BASE_REGS;
10037    }
10038
10039  if (GET_CODE (in) == REG)
10040    {
10041      regno = REGNO (in);
10042      if (regno >= FIRST_PSEUDO_REGISTER)
10043	{
10044	  regno = true_regnum (in);
10045	  if (regno >= FIRST_PSEUDO_REGISTER)
10046	    regno = -1;
10047	}
10048    }
10049  else if (GET_CODE (in) == SUBREG)
10050    {
10051      regno = true_regnum (in);
10052      if (regno >= FIRST_PSEUDO_REGISTER)
10053	regno = -1;
10054    }
10055  else
10056    regno = -1;
10057
10058  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10059     into anything.  */
10060  if (class == GENERAL_REGS || class == BASE_REGS
10061      || (regno >= 0 && INT_REGNO_P (regno)))
10062    return NO_REGS;
10063
10064  /* Constants, memory, and FP registers can go into FP registers.  */
10065  if ((regno == -1 || FP_REGNO_P (regno))
10066      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10067    return NO_REGS;
10068
10069  /* Memory and AltiVec registers can go into AltiVec registers.  */
10070  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10071      && class == ALTIVEC_REGS)
10072    return NO_REGS;
10073
10074  /* We can copy among the CR registers.  */
10075  if ((class == CR_REGS || class == CR0_REGS)
10076      && regno >= 0 && CR_REGNO_P (regno))
10077    return NO_REGS;
10078
10079  /* Otherwise, we need GENERAL_REGS.  */
10080  return GENERAL_REGS;
10081}
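
/* Editorial example (not part of the original source): on ELF, copying
   (symbol_ref "x") into a FLOAT_REGS destination returns BASE_REGS,
   telling reload to materialize the address in a base register first,
   while copying one CR field to another returns NO_REGS, meaning no
   scratch register is needed.  */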
10082
10083/* Given a comparison operation, return the bit number in CCR to test.  We
10084   know this is a valid comparison.
10085
10086   SCC_P is 1 if this is for an scc.  That means that %D will have been
10087   used instead of %C, so the bits will be in different places.
10088
10089   Return -1 if OP isn't a valid comparison for some reason.  */
10090
10091int
10092ccr_bit (rtx op, int scc_p)
10093{
10094  enum rtx_code code = GET_CODE (op);
10095  enum machine_mode cc_mode;
10096  int cc_regnum;
10097  int base_bit;
10098  rtx reg;
10099
10100  if (!COMPARISON_P (op))
10101    return -1;
10102
10103  reg = XEXP (op, 0);
10104
10105  gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
10106
10107  cc_mode = GET_MODE (reg);
10108  cc_regnum = REGNO (reg);
10109  base_bit = 4 * (cc_regnum - CR0_REGNO);
10110
10111  validate_condition_mode (code, cc_mode);
10112
10113  /* When generating a sCOND operation, only positive conditions are
10114     allowed.  */
10115  gcc_assert (!scc_p
10116	      || code == EQ || code == GT || code == LT || code == UNORDERED
10117	      || code == GTU || code == LTU);
10118
10119  switch (code)
10120    {
10121    case NE:
10122      return scc_p ? base_bit + 3 : base_bit + 2;
10123    case EQ:
10124      return base_bit + 2;
10125    case GT:  case GTU:  case UNLE:
10126      return base_bit + 1;
10127    case LT:  case LTU:  case UNGE:
10128      return base_bit;
10129    case ORDERED:  case UNORDERED:
10130      return base_bit + 3;
10131
10132    case GE:  case GEU:
10133      /* If scc, we will have done a cror to put the bit in the
10134	 unordered position.  So test that bit.  For integer, this is ! LT
10135	 unless this is an scc insn.  */
10136      return scc_p ? base_bit + 3 : base_bit;
10137
10138    case LE:  case LEU:
10139      return scc_p ? base_bit + 3 : base_bit + 1;
10140
10141    default:
10142      gcc_unreachable ();
10143    }
10144}
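
/* Editorial example (not part of the original source): for a GT
   comparison against cr2, base_bit is 4 * 2 == 8 and ccr_bit returns
   9, the GT bit of that field.  With SCC_P set, GE returns
   base_bit + 3 instead, since the scc sequence will have cror'ed the
   wanted condition into the SO/UNORDERED position.  */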
10145
10146/* Return the GOT register.  */
10147
10148rtx
10149rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
10150{
10151  /* The second flow pass currently (June 1999) can't update
10152     regs_ever_live without disturbing other parts of the compiler, so
10153     update it here to make the prolog/epilogue code happy.  */
10154  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
10155    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
10156
10157  current_function_uses_pic_offset_table = 1;
10158
10159  return pic_offset_table_rtx;
10160}
10161
10162/* Function to init struct machine_function.
10163   This will be called, via a pointer variable,
10164   from push_function_context.  */
10165
10166static struct machine_function *
10167rs6000_init_machine_status (void)
10168{
10169  return ggc_alloc_cleared (sizeof (machine_function));
10170}
10171
10172/* These macros test for integers and extract the low-order bits.  */
10173#define INT_P(X)  \
10174((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
10175 && GET_MODE (X) == VOIDmode)
10176
10177#define INT_LOWPART(X) \
10178  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10179
10180int
10181extract_MB (rtx op)
10182{
10183  int i;
10184  unsigned long val = INT_LOWPART (op);
10185
10186  /* If the high bit is zero, the value is the first 1 bit we find
10187     from the left.  */
10188  if ((val & 0x80000000) == 0)
10189    {
10190      gcc_assert (val & 0xffffffff);
10191
10192      i = 1;
10193      while (((val <<= 1) & 0x80000000) == 0)
10194	++i;
10195      return i;
10196    }
10197
10198  /* If the high bit is set and the low bit is not, or the mask is all
10199     1's, the value is zero.  */
10200  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10201    return 0;
10202
10203  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
10204     from the right.  */
10205  i = 31;
10206  while (((val >>= 1) & 1) != 0)
10207    --i;
10208
10209  return i;
10210}
10211
10212int
10213extract_ME (rtx op)
10214{
10215  int i;
10216  unsigned long val = INT_LOWPART (op);
10217
10218  /* If the low bit is zero, the value is the first 1 bit we find from
10219     the right.  */
10220  if ((val & 1) == 0)
10221    {
10222      gcc_assert (val & 0xffffffff);
10223
10224      i = 30;
10225      while (((val >>= 1) & 1) == 0)
10226	--i;
10227
10228      return i;
10229    }
10230
10231  /* If the low bit is set and the high bit is not, or the mask is all
10232     1's, the value is 31.  */
10233  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10234    return 31;
10235
10236  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
10237     from the left.  */
10238  i = 0;
10239  while (((val <<= 1) & 0x80000000) != 0)
10240    ++i;
10241
10242  return i;
10243}
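
/* Editorial example (not part of the original source): for the rlwinm
   mask 0x00ffff00, extract_MB returns 8 and extract_ME returns 23,
   i.e. bits 8..23 in IBM bit numbering (bit 0 is the MSB).  For the
   wrap-around mask 0xff0000ff, extract_MB returns 24 and extract_ME
   returns 7.  */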
10244
10245/* Locate some local-dynamic symbol still in use by this function
10246   so that we can print its name in some tls_ld pattern.  */
10247
10248static const char *
10249rs6000_get_some_local_dynamic_name (void)
10250{
10251  rtx insn;
10252
10253  if (cfun->machine->some_ld_name)
10254    return cfun->machine->some_ld_name;
10255
10256  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10257    if (INSN_P (insn)
10258	&& for_each_rtx (&PATTERN (insn),
10259			 rs6000_get_some_local_dynamic_name_1, 0))
10260      return cfun->machine->some_ld_name;
10261
10262  gcc_unreachable ();
10263}
10264
10265/* Helper function for rs6000_get_some_local_dynamic_name.  */
10266
10267static int
10268rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
10269{
10270  rtx x = *px;
10271
10272  if (GET_CODE (x) == SYMBOL_REF)
10273    {
10274      const char *str = XSTR (x, 0);
10275      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10276	{
10277	  cfun->machine->some_ld_name = str;
10278	  return 1;
10279	}
10280    }
10281
10282  return 0;
10283}
10284
10285/* Write out a function code label.  */
10286
10287void
10288rs6000_output_function_entry (FILE *file, const char *fname)
10289{
10290  if (fname[0] != '.')
10291    {
10292      switch (DEFAULT_ABI)
10293	{
10294	default:
10295	  gcc_unreachable ();
10296
10297	case ABI_AIX:
10298	  if (DOT_SYMBOLS)
10299	    putc ('.', file);
10300	  else
10301	    ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10302	  break;
10303
10304	case ABI_V4:
10305	case ABI_DARWIN:
10306	  break;
10307	}
10308    }
10309  if (TARGET_AIX)
10310    RS6000_OUTPUT_BASENAME (file, fname);
10311  else
10312    assemble_name (file, fname);
10313}
10314
10315/* Print an operand.  Recognize special options, documented below.  */
10316
10317#if TARGET_ELF
10318#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10319#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10320#else
10321#define SMALL_DATA_RELOC "sda21"
10322#define SMALL_DATA_REG 0
10323#endif
10324
10325void
10326print_operand (FILE *file, rtx x, int code)
10327{
10328  int i;
10329  HOST_WIDE_INT val;
10330  unsigned HOST_WIDE_INT uval;
10331
10332  switch (code)
10333    {
10334    case '.':
10335      /* Write out an instruction after the call which may be replaced
10336	 with glue code by the loader.  This depends on the AIX version.  */
10337      asm_fprintf (file, RS6000_CALL_GLUE);
10338      return;
10339
10340      /* %a is output_address.  */
10341
10342    case 'A':
10343      /* If X is a constant integer whose low-order 5 bits are zero,
10344	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
10345	 in the AIX assembler where "sri" with a zero shift count
10346	 writes a trash instruction.  */
10347      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
10348	putc ('l', file);
10349      else
10350	putc ('r', file);
10351      return;
10352
10353    case 'b':
10354      /* If constant, low-order 16 bits of constant, unsigned.
10355	 Otherwise, write normally.  */
10356      if (INT_P (x))
10357	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10358      else
10359	print_operand (file, x, 0);
10360      return;
10361
10362    case 'B':
10363      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10364	 for 64-bit mask direction.  */
10365      putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
10366      return;
10367
10371    case 'c':
10372      /* X is a CR register.  Print the number of the GT bit of the CR.  */
10373      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10374	output_operand_lossage ("invalid %%c value");
10375      else
10376	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10377      return;
10378
10379    case 'D':
10380      /* Like 'J' but get to the GT bit only.  */
10381      gcc_assert (GET_CODE (x) == REG);
10382
10383      /* Bit 1 is GT bit.  */
10384      i = 4 * (REGNO (x) - CR0_REGNO) + 1;
10385
10386      /* Add one for shift count in rlinm for scc.  */
10387      fprintf (file, "%d", i + 1);
10388      return;
10389
10390    case 'E':
10391      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
10392      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10393	output_operand_lossage ("invalid %%E value");
10394      else
10395	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
10396      return;
10397
10398    case 'f':
10399      /* X is a CR register.  Print the shift count needed to move it
10400	 to the high-order four bits.  */
10401      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10402	output_operand_lossage ("invalid %%f value");
10403      else
10404	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
10405      return;
10406
10407    case 'F':
10408      /* Similar, but print the count for the rotate in the opposite
10409	 direction.  */
10410      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10411	output_operand_lossage ("invalid %%F value");
10412      else
10413	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
10414      return;
10415
10416    case 'G':
10417      /* X is a constant integer.  If it is negative, print "m",
10418	 otherwise print "z".  This is to make an aze or ame insn.  */
10419      if (GET_CODE (x) != CONST_INT)
10420	output_operand_lossage ("invalid %%G value");
10421      else if (INTVAL (x) >= 0)
10422	putc ('z', file);
10423      else
10424	putc ('m', file);
10425      return;
10426
10427    case 'h':
10428      /* If constant, output low-order five bits.  Otherwise, write
10429	 normally.  */
10430      if (INT_P (x))
10431	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
10432      else
10433	print_operand (file, x, 0);
10434      return;
10435
10436    case 'H':
10437      /* If constant, output low-order six bits.  Otherwise, write
10438	 normally.  */
10439      if (INT_P (x))
10440	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
10441      else
10442	print_operand (file, x, 0);
10443      return;
10444
10445    case 'I':
10446      /* Print `i' if this is a constant, else nothing.  */
10447      if (INT_P (x))
10448	putc ('i', file);
10449      return;
10450
10451    case 'j':
10452      /* Write the bit number in CCR for jump.  */
10453      i = ccr_bit (x, 0);
10454      if (i == -1)
10455	output_operand_lossage ("invalid %%j code");
10456      else
10457	fprintf (file, "%d", i);
10458      return;
10459
10460    case 'J':
10461      /* Similar, but add one for shift count in rlinm for scc and pass
10462	 scc flag to `ccr_bit'.  */
10463      i = ccr_bit (x, 1);
10464      if (i == -1)
10465	output_operand_lossage ("invalid %%J code");
10466      else
10467	/* If we want bit 31, write a shift count of zero, not 32.  */
10468	fprintf (file, "%d", i == 31 ? 0 : i + 1);
10469      return;
10470
10471    case 'k':
10472      /* X must be a constant.  Write the 1's complement of the
10473	 constant.  */
10474      if (! INT_P (x))
10475	output_operand_lossage ("invalid %%k value");
10476      else
10477	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
10478      return;
10479
10480    case 'K':
10481      /* X must be a symbolic constant on ELF.  Write an
10482	 expression suitable for an 'addi' that adds in the low 16
10483	 bits of the MEM.  */
10484      if (GET_CODE (x) != CONST)
10485	{
10486	  print_operand_address (file, x);
10487	  fputs ("@l", file);
10488	}
10489      else
10490	{
10491	  if (GET_CODE (XEXP (x, 0)) != PLUS
10492	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10493		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10494	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
10495	    output_operand_lossage ("invalid %%K value");
10496	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
10497	  fputs ("@l", file);
10498	  /* For GNU as, there must be a non-alphanumeric character
10499	     between 'l' and the number.  The '-' is added by
10500	     print_operand() already.  */
10501	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10502	    fputs ("+", file);
10503	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10504	}
10505      return;
10506
10507      /* %l is output_asm_label.  */
10508
10509    case 'L':
10510      /* Write second word of DImode or DFmode reference.  Works on register
10511	 or non-indexed memory only.  */
10512      if (GET_CODE (x) == REG)
10513	fputs (reg_names[REGNO (x) + 1], file);
10514      else if (GET_CODE (x) == MEM)
10515	{
10516	  /* Handle possible auto-increment.  Since it is pre-increment and
10517	     we have already done it, we can just use an offset of one word.  */
10518	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10519	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10520	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10521					   UNITS_PER_WORD));
10522	  else
10523	    output_address (XEXP (adjust_address_nv (x, SImode,
10524						     UNITS_PER_WORD),
10525				  0));
10526
10527	  if (small_data_operand (x, GET_MODE (x)))
10528	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10529		     reg_names[SMALL_DATA_REG]);
10530	}
10531      return;
10532
10533    case 'm':
10534      /* MB value for a mask operand.  */
10535      if (! mask_operand (x, SImode))
10536	output_operand_lossage ("invalid %%m value");
10537
10538      fprintf (file, "%d", extract_MB (x));
10539      return;
10540
10541    case 'M':
10542      /* ME value for a mask operand.  */
10543      if (! mask_operand (x, SImode))
10544	output_operand_lossage ("invalid %%M value");
10545
10546      fprintf (file, "%d", extract_ME (x));
10547      return;
10548
10549      /* %n outputs the negative of its operand.  */
10550
10551    case 'N':
10552      /* Write the number of elements in the vector times 4.  */
10553      if (GET_CODE (x) != PARALLEL)
10554	output_operand_lossage ("invalid %%N value");
10555      else
10556	fprintf (file, "%d", XVECLEN (x, 0) * 4);
10557      return;
10558
10559    case 'O':
10560      /* Similar, but subtract 1 first.  */
10561      if (GET_CODE (x) != PARALLEL)
10562	output_operand_lossage ("invalid %%O value");
10563      else
10564	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
10565      return;
10566
10567    case 'p':
10568      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
10569      if (! INT_P (x)
10570	  || INT_LOWPART (x) < 0
10571	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
10572	output_operand_lossage ("invalid %%p value");
10573      else
10574	fprintf (file, "%d", i);
10575      return;
10576
10577    case 'P':
10578      /* The operand must be an indirect memory reference.  The result
10579	 is the register name.  */
10580      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10581	  || REGNO (XEXP (x, 0)) >= 32)
10582	output_operand_lossage ("invalid %%P value");
10583      else
10584	fputs (reg_names[REGNO (XEXP (x, 0))], file);
10585      return;
10586
10587    case 'q':
10588      /* This outputs the logical code corresponding to a boolean
10589	 expression.  The expression may have one or both operands
10590	 negated (if one, only the first one).  For condition register
10591	 logical operations, it will also treat the negated
10592	 CR codes as NOTs, but not handle NOTs of them.  */
10593      {
10594	const char *const *t = 0;
10595	const char *s;
10596	enum rtx_code code = GET_CODE (x);
10597	static const char * const tbl[3][3] = {
10598	  { "and", "andc", "nor" },
10599	  { "or", "orc", "nand" },
10600	  { "xor", "eqv", "xor" } };
10601
10602	if (code == AND)
10603	  t = tbl[0];
10604	else if (code == IOR)
10605	  t = tbl[1];
10606	else if (code == XOR)
10607	  t = tbl[2];
10608	else
10609	  output_operand_lossage ("invalid %%q value");
10610
10611	if (GET_CODE (XEXP (x, 0)) != NOT)
10612	  s = t[0];
10613	else
10614	  {
10615	    if (GET_CODE (XEXP (x, 1)) == NOT)
10616	      s = t[2];
10617	    else
10618	      s = t[1];
10619	  }
10620
10621	fputs (s, file);
10622      }
10623      return;
10624
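      /* For example, (and (not r3) r4) selects "andc", (ior (not r3)
	 (not r4)) selects "nand", and (xor (not r3) r4) selects "eqv";
	 the double-negated entries follow from De Morgan's laws applied
	 to the table above.  (Register numbers are illustrative.)  */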
10625    case 'Q':
10626      if (TARGET_MFCRF)
10627	fputc (',', file);
10628        /* FALLTHRU */
10629      else
10630	return;
10631
10632    case 'R':
10633      /* X is a CR register.  Print the mask for `mtcrf'.  */
10634      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10635	output_operand_lossage ("invalid %%R value");
10636      else
10637	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
10638      return;
10639
10640    case 's':
10641      /* Low 5 bits of 32 - value */
10642      if (! INT_P (x))
10643	output_operand_lossage ("invalid %%s value");
10644      else
10645	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
10646      return;
10647
10648    case 'S':
10649      /* PowerPC64 mask position.  A mask of all zeros is excluded.
10650	 A 32-bit CONST_INT mask is considered sign-extended, so any
10651	 0/1 transition must occur within the CONST_INT, not on its boundary.  */
10652      if (! mask64_operand (x, DImode))
10653	output_operand_lossage ("invalid %%S value");
10654
10655      uval = INT_LOWPART (x);
10656
10657      if (uval & 1)	/* Clear Left */
10658	{
10659#if HOST_BITS_PER_WIDE_INT > 64
10660	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10661#endif
10662	  i = 64;
10663	}
10664      else		/* Clear Right */
10665	{
10666	  uval = ~uval;
10667#if HOST_BITS_PER_WIDE_INT > 64
10668	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10669#endif
10670	  i = 63;
10671	}
10672      while (uval != 0)
10673	--i, uval >>= 1;
10674      gcc_assert (i >= 0);
10675      fprintf (file, "%d", i);
10676      return;
10677
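      /* Worked example: for the clear-left mask 0x00000000ffffffff the
	 loop above yields 32 (the MB value for an rldicl), and for the
	 clear-right mask 0xffffffff00000000 it yields 31 (the ME value
	 for an rldicr).  */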
10678    case 't':
10679      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
10680      gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
10681
10682      /* Bit 3 is OV bit.  */
10683      i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10684
10685      /* If we want bit 31, write a shift count of zero, not 32.  */
10686      fprintf (file, "%d", i == 31 ? 0 : i + 1);
10687      return;
10688
10689    case 'T':
10690      /* Print the symbolic name of a branch target register.  */
10691      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10692				  && REGNO (x) != COUNT_REGISTER_REGNUM))
10693	output_operand_lossage ("invalid %%T value");
10694      else if (REGNO (x) == LINK_REGISTER_REGNUM)
10695	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10696      else
10697	fputs ("ctr", file);
10698      return;
10699
10700    case 'u':
10701      /* High-order 16 bits of constant for use in unsigned operand.  */
10702      if (! INT_P (x))
10703	output_operand_lossage ("invalid %%u value");
10704      else
10705	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10706		 (INT_LOWPART (x) >> 16) & 0xffff);
10707      return;
10708
10709    case 'v':
10710      /* High-order 16 bits of constant for use in signed operand.  */
10711      if (! INT_P (x))
10712	output_operand_lossage ("invalid %%v value");
10713      else
10714	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10715		 (INT_LOWPART (x) >> 16) & 0xffff);
10716      return;
10717
10718    case 'U':
10719      /* Print `u' if this has an auto-increment or auto-decrement.  */
10720      if (GET_CODE (x) == MEM
10721	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
10722	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10723	putc ('u', file);
10724      return;
10725
10726    case 'V':
10727      /* Print the trap code for this operand.  */
10728      switch (GET_CODE (x))
10729	{
10730	case EQ:
10731	  fputs ("eq", file);   /* 4 */
10732	  break;
10733	case NE:
10734	  fputs ("ne", file);   /* 24 */
10735	  break;
10736	case LT:
10737	  fputs ("lt", file);   /* 16 */
10738	  break;
10739	case LE:
10740	  fputs ("le", file);   /* 20 */
10741	  break;
10742	case GT:
10743	  fputs ("gt", file);   /* 8 */
10744	  break;
10745	case GE:
10746	  fputs ("ge", file);   /* 12 */
10747	  break;
10748	case LTU:
10749	  fputs ("llt", file);  /* 2 */
10750	  break;
10751	case LEU:
10752	  fputs ("lle", file);  /* 6 */
10753	  break;
10754	case GTU:
10755	  fputs ("lgt", file);  /* 1 */
10756	  break;
10757	case GEU:
10758	  fputs ("lge", file);  /* 5 */
10759	  break;
10760	default:
10761	  gcc_unreachable ();
10762	}
10763      break;
10764
10765    case 'w':
10766      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
10767	 normally.  */
10768      if (INT_P (x))
10769	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10770		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10771      else
10772	print_operand (file, x, 0);
10773      return;
10774
10775    case 'W':
10776      /* MB value for a PowerPC64 rldic operand.  */
10777      val = (GET_CODE (x) == CONST_INT
10778	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10779
10780      if (val < 0)
10781	i = -1;
10782      else
10783	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10784	  if ((val <<= 1) < 0)
10785	    break;
10786
10787#if HOST_BITS_PER_WIDE_INT == 32
10788      if (GET_CODE (x) == CONST_INT && i >= 0)
10789	i += 32;  /* zero-extend high-part was all 0's */
10790      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10791	{
10792	  val = CONST_DOUBLE_LOW (x);
10793
10794	  gcc_assert (val);
10795	  if (val < 0)
10796	    --i;
10797	  else
10798	    for ( ; i < 64; i++)
10799	      if ((val <<= 1) < 0)
10800		break;
10801	}
10802#endif
10803
10804      fprintf (file, "%d", i + 1);
10805      return;
10806
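      /* Worked example: the loop above counts leading zero bits, so a
	 mask value of 0x00ff000000000000 prints 8 and any negative value
	 prints 0; this is the MB operand expected by rldic.  */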
10807    case 'X':
10808      if (GET_CODE (x) == MEM
10809	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
10810	putc ('x', file);
10811      return;
10812
10813    case 'Y':
10814      /* Like 'L', for third word of TImode.  */
10815      if (GET_CODE (x) == REG)
10816	fputs (reg_names[REGNO (x) + 2], file);
10817      else if (GET_CODE (x) == MEM)
10818	{
10819	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10820	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10821	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10822	  else
10823	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10824	  if (small_data_operand (x, GET_MODE (x)))
10825	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10826		     reg_names[SMALL_DATA_REG]);
10827	}
10828      return;
10829
10830    case 'z':
10831      /* X is a SYMBOL_REF.  Write out the name preceded by a
10832	 period and without any trailing data in brackets.  Used for function
10833	 names.  If we are configured for System V (or the embedded ABI) on
10834	 the PowerPC, do not emit the period, since those systems do not use
10835	 TOCs and the like.  */
10836      gcc_assert (GET_CODE (x) == SYMBOL_REF);
10837
10838      /* Mark the decl as referenced so that cgraph will output the
10839	 function.  */
10840      if (SYMBOL_REF_DECL (x))
10841	mark_decl_referenced (SYMBOL_REF_DECL (x));
10842
10843      /* For macho, check to see if we need a stub.  */
10844      if (TARGET_MACHO)
10845	{
10846	  const char *name = XSTR (x, 0);
10847#if TARGET_MACHO
10848	  if (MACHOPIC_INDIRECT
10849	      && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10850	    name = machopic_indirection_name (x, /*stub_p=*/true);
10851#endif
10852	  assemble_name (file, name);
10853	}
10854      else if (!DOT_SYMBOLS)
10855	assemble_name (file, XSTR (x, 0));
10856      else
10857	rs6000_output_function_entry (file, XSTR (x, 0));
10858      return;
10859
10860    case 'Z':
10861      /* Like 'L', for last word of TImode.  */
10862      if (GET_CODE (x) == REG)
10863	fputs (reg_names[REGNO (x) + 3], file);
10864      else if (GET_CODE (x) == MEM)
10865	{
10866	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
10867	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10868	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10869	  else
10870	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10871	  if (small_data_operand (x, GET_MODE (x)))
10872	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10873		     reg_names[SMALL_DATA_REG]);
10874	}
10875      return;
10876
10877      /* Print AltiVec or SPE memory operand.  */
10878    case 'y':
10879      {
10880	rtx tmp;
10881
10882	gcc_assert (GET_CODE (x) == MEM);
10883
10884	tmp = XEXP (x, 0);
10885
10886	/* Ugly hack because %y is overloaded.  */
10887	if (TARGET_E500 && GET_MODE_SIZE (GET_MODE (x)) == 8)
10888	  {
10889	    /* Handle [reg].  */
10890	    if (GET_CODE (tmp) == REG)
10891	      {
10892		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10893		break;
10894	      }
10895	    /* Handle [reg+UIMM].  */
10896	    else if (GET_CODE (tmp) == PLUS &&
10897		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10898	      {
10899		int x;
10900
10901		gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
10902
10903		x = INTVAL (XEXP (tmp, 1));
10904		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10905		break;
10906	      }
10907
10908	    /* Fall through.  Must be [reg+reg].  */
10909	  }
10910	if (TARGET_ALTIVEC
10911	    && GET_CODE (tmp) == AND
10912	    && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10913	    && INTVAL (XEXP (tmp, 1)) == -16)
10914	  tmp = XEXP (tmp, 0);
10915	if (GET_CODE (tmp) == REG)
10916	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10917	else
10918	  {
10919	    gcc_assert (GET_CODE (tmp) == PLUS
10920			&& REG_P (XEXP (tmp, 0))
10921			&& REG_P (XEXP (tmp, 1)));
10922
10923	    if (REGNO (XEXP (tmp, 0)) == 0)
10924	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10925		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
10926	    else
10927	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10928		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
10929	  }
10930	break;
10931      }
10932
10933    case 0:
10934      if (GET_CODE (x) == REG)
10935	fprintf (file, "%s", reg_names[REGNO (x)]);
10936      else if (GET_CODE (x) == MEM)
10937	{
10938	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
10939	     know the width from the mode.  */
10940	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10941	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10942		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10943	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10944	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10945		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10946	  else
10947	    output_address (XEXP (x, 0));
10948	}
10949      else
10950	output_addr_const (file, x);
10951      return;
10952
10953    case '&':
10954      assemble_name (file, rs6000_get_some_local_dynamic_name ());
10955      return;
10956
10957    default:
10958      output_operand_lossage ("invalid %%xn code");
10959    }
10960}
10961
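/* As an illustration, a 32-bit DImode add might use an output template
   such as "{a|addc} %L0,%L1,%L2\;{ae|adde} %0,%1,%2" (a sketch, not
   quoted from the .md file): the %L operands print the second-word
   registers via the 'L' case above, and the {old|new} braces select
   between POWER and PowerPC mnemonics.  */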
10962/* Print the address of an operand.  */
10963
10964void
10965print_operand_address (FILE *file, rtx x)
10966{
10967  if (GET_CODE (x) == REG)
10968    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
10969  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10970	   || GET_CODE (x) == LABEL_REF)
10971    {
10972      output_addr_const (file, x);
10973      if (small_data_operand (x, GET_MODE (x)))
10974	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10975		 reg_names[SMALL_DATA_REG]);
10976      else
10977	gcc_assert (!TARGET_TOC);
10978    }
10979  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10980    {
10981      gcc_assert (REG_P (XEXP (x, 0)));
10982      if (REGNO (XEXP (x, 0)) == 0)
10983	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10984		 reg_names[ REGNO (XEXP (x, 0)) ]);
10985      else
10986	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10987		 reg_names[ REGNO (XEXP (x, 1)) ]);
10988    }
10989  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
10990    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10991	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
10992#if TARGET_ELF
10993  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
10994	   && CONSTANT_P (XEXP (x, 1)))
10995    {
10996      output_addr_const (file, XEXP (x, 1));
10997      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10998    }
10999#endif
11000#if TARGET_MACHO
11001  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
11002	   && CONSTANT_P (XEXP (x, 1)))
11003    {
11004      fprintf (file, "lo16(");
11005      output_addr_const (file, XEXP (x, 1));
11006      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11007    }
11008#endif
11009  else if (legitimate_constant_pool_address_p (x))
11010    {
11011      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
11012	{
11013	  rtx contains_minus = XEXP (x, 1);
11014	  rtx minus, symref;
11015	  const char *name;
11016
11017	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
11018	     turn it into (sym) for output_addr_const.  */
11019	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11020	    contains_minus = XEXP (contains_minus, 0);
11021
11022	  minus = XEXP (contains_minus, 0);
11023	  symref = XEXP (minus, 0);
11024	  XEXP (contains_minus, 0) = symref;
11025	  if (TARGET_ELF)
11026	    {
11027	      char *newname;
11028
11029	      name = XSTR (symref, 0);
11030	      newname = alloca (strlen (name) + sizeof ("@toc"));
11031	      strcpy (newname, name);
11032	      strcat (newname, "@toc");
11033	      XSTR (symref, 0) = newname;
11034	    }
11035	  output_addr_const (file, XEXP (x, 1));
11036	  if (TARGET_ELF)
11037	    XSTR (symref, 0) = name;
11038	  XEXP (contains_minus, 0) = minus;
11039	}
11040      else
11041	output_addr_const (file, XEXP (x, 1));
11042
11043      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11044    }
11045  else
11046    gcc_unreachable ();
11047}
11048
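/* Examples of the forms handled above, using the default register
   names: (reg 9) prints as "0(9)", (plus (reg 9) (reg 10)) as "9,10",
   (plus (reg 9) (const_int 8)) as "8(9)", and an ELF LO_SUM of
   register 9 and a symbol as "sym@l(9)".  */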
11049/* Target hook for assembling integer objects.  The PowerPC version has
11050   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11051   is defined.  It also needs to handle DI-mode objects on 64-bit
11052   targets.  */
11053
11054static bool
11055rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
11056{
11057#ifdef RELOCATABLE_NEEDS_FIXUP
11058  /* Special handling for SI values.  */
11059  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
11060    {
11061      static int recurse = 0;
11062
11063      /* For -mrelocatable, we mark all addresses that need to be fixed up
11064	 in the .fixup section.  */
11065      if (TARGET_RELOCATABLE
11066	  && in_section != toc_section
11067	  && in_section != text_section
11068	  && !unlikely_text_section_p (in_section)
11069	  && !recurse
11070	  && GET_CODE (x) != CONST_INT
11071	  && GET_CODE (x) != CONST_DOUBLE
11072	  && CONSTANT_P (x))
11073	{
11074	  char buf[256];
11075
11076	  recurse = 1;
11077	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11078	  fixuplabelno++;
11079	  ASM_OUTPUT_LABEL (asm_out_file, buf);
11080	  fprintf (asm_out_file, "\t.long\t(");
11081	  output_addr_const (asm_out_file, x);
11082	  fprintf (asm_out_file, ")@fixup\n");
11083	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11084	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
11085	  fprintf (asm_out_file, "\t.long\t");
11086	  assemble_name (asm_out_file, buf);
11087	  fprintf (asm_out_file, "\n\t.previous\n");
11088	  recurse = 0;
11089	  return true;
11090	}
11091      /* Remove initial .'s to turn a -mcall-aixdesc function
11092	 address into the address of the descriptor, not the function
11093	 itself.  */
11094      else if (GET_CODE (x) == SYMBOL_REF
11095	       && XSTR (x, 0)[0] == '.'
11096	       && DEFAULT_ABI == ABI_AIX)
11097	{
11098	  const char *name = XSTR (x, 0);
11099	  while (*name == '.')
11100	    name++;
11101
11102	  fprintf (asm_out_file, "\t.long\t%s\n", name);
11103	  return true;
11104	}
11105    }
11106#endif /* RELOCATABLE_NEEDS_FIXUP */
11107  return default_assemble_integer (x, size, aligned_p);
11108}
11109
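/* For -mrelocatable the code above emits a pair like

	.LCP1:
		.long	(sym)@fixup
		.section ".fixup","aw"
		.align	2
		.long	.LCP1
		.previous

   (the label spelling is illustrative; ASM_GENERATE_INTERNAL_LABEL
   supplies the real one), so the word at .LCP1 can be fixed up when
   the code is relocated at load time.  */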
11110#ifdef HAVE_GAS_HIDDEN
11111/* Emit an assembler directive to set symbol visibility for DECL to
11112   VISIBILITY_TYPE.  */
11113
11114static void
11115rs6000_assemble_visibility (tree decl, int vis)
11116{
11117  /* Functions need to have their entry point symbol visibility set as
11118     well as their descriptor symbol visibility.  */
11119  if (DEFAULT_ABI == ABI_AIX
11120      && DOT_SYMBOLS
11121      && TREE_CODE (decl) == FUNCTION_DECL)
11122    {
11123      static const char * const visibility_types[] = {
11124	NULL, "internal", "hidden", "protected"
11125      };
11126
11127      const char *name, *type;
11128
11129      name = ((* targetm.strip_name_encoding)
11130	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
11131      type = visibility_types[vis];
11132
11133      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11134      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
11135    }
11136  else
11137    default_assemble_visibility (decl, vis);
11138}
11139#endif
11140
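/* For AIX with dot symbols, a hidden function "foo" therefore gets
   both "\t.hidden\tfoo" and "\t.hidden\t.foo", covering the function
   descriptor and the entry point respectively.  */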
11141enum rtx_code
11142rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
11143{
11144  /* Reversal of FP compares needs care -- an ordered compare
11145     becomes an unordered compare and vice versa.  */
11146  if (mode == CCFPmode
11147      && (!flag_finite_math_only
11148	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11149	  || code == UNEQ || code == LTGT))
11150    return reverse_condition_maybe_unordered (code);
11151  else
11152    return reverse_condition (code);
11153}
11154
11155/* Generate a compare for CODE.  Return a brand-new rtx that
11156   represents the result of the compare.  */
11157
11158static rtx
11159rs6000_generate_compare (enum rtx_code code)
11160{
11161  enum machine_mode comp_mode;
11162  rtx compare_result;
11163
11164  if (rs6000_compare_fp_p)
11165    comp_mode = CCFPmode;
11166  else if (code == GTU || code == LTU
11167	   || code == GEU || code == LEU)
11168    comp_mode = CCUNSmode;
11169  else if ((code == EQ || code == NE)
11170	   && GET_CODE (rs6000_compare_op0) == SUBREG
11171	   && GET_CODE (rs6000_compare_op1) == SUBREG
11172	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
11173	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
11174    /* These are unsigned values, perhaps there will be a later
11175       ordering compare that can be shared with this one.
11176       Unfortunately we cannot detect the signedness of the operands
11177       for non-subregs.  */
11178    comp_mode = CCUNSmode;
11179  else
11180    comp_mode = CCmode;
11181
11182  /* First, the compare.  */
11183  compare_result = gen_reg_rtx (comp_mode);
11184
11185  /* E500 FP compare instructions on the GPRs.  Yuck!  */
11186  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11187      && rs6000_compare_fp_p)
11188    {
11189      rtx cmp, or_result, compare_result2;
11190      enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
11191
11192      if (op_mode == VOIDmode)
11193	op_mode = GET_MODE (rs6000_compare_op1);
11194
11195      /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
11196	 This explains the following mess.  */
11197
11198      switch (code)
11199	{
11200	case EQ: case UNEQ: case NE: case LTGT:
11201	  switch (op_mode)
11202	    {
11203	    case SFmode:
11204	      cmp = flag_unsafe_math_optimizations
11205		? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
11206				   rs6000_compare_op1)
11207		: gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
11208				   rs6000_compare_op1);
11209	      break;
11210
11211	    case DFmode:
11212	      cmp = flag_unsafe_math_optimizations
11213		? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
11214				   rs6000_compare_op1)
11215		: gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
11216				   rs6000_compare_op1);
11217	      break;
11218
11219	    default:
11220	      gcc_unreachable ();
11221	    }
11222	  break;
11223
11224	case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
11225	  switch (op_mode)
11226	    {
11227	    case SFmode:
11228	      cmp = flag_unsafe_math_optimizations
11229		? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
11230				   rs6000_compare_op1)
11231		: gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
11232				   rs6000_compare_op1);
11233	      break;
11234
11235	    case DFmode:
11236	      cmp = flag_unsafe_math_optimizations
11237		? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
11238				   rs6000_compare_op1)
11239		: gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
11240				   rs6000_compare_op1);
11241	      break;
11242
11243	    default:
11244	      gcc_unreachable ();
11245	    }
11246	  break;
11247
11248	case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
11249	  switch (op_mode)
11250	    {
11251	    case SFmode:
11252	      cmp = flag_unsafe_math_optimizations
11253		? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11254				   rs6000_compare_op1)
11255		: gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11256				   rs6000_compare_op1);
11257	      break;
11258
11259	    case DFmode:
11260	      cmp = flag_unsafe_math_optimizations
11261		? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11262				   rs6000_compare_op1)
11263		: gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11264				   rs6000_compare_op1);
11265	      break;
11266
11267	    default:
11268	      gcc_unreachable ();
11269	    }
11270	  break;
11271        default:
11272          gcc_unreachable ();
11273	}
11274
11275      /* Synthesize LE and GE from LT/GT || EQ.  */
11276      if (code == LE || code == GE || code == LEU || code == GEU)
11277	{
11278	  emit_insn (cmp);
11279
11280	  switch (code)
11281	    {
11282	    case LE: code = LT; break;
11283	    case GE: code = GT; break;
11284	    case LEU: code = LT; break;
11285	    case GEU: code = GT; break;
11286	    default: gcc_unreachable ();
11287	    }
11288
11289	  compare_result2 = gen_reg_rtx (CCFPmode);
11290
11291	  /* Do the EQ.  */
11292	  switch (op_mode)
11293	    {
11294	    case SFmode:
11295	      cmp = flag_unsafe_math_optimizations
11296		? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11297				   rs6000_compare_op1)
11298		: gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11299				   rs6000_compare_op1);
11300	      break;
11301
11302	    case DFmode:
11303	      cmp = flag_unsafe_math_optimizations
11304		? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11305				   rs6000_compare_op1)
11306		: gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11307				   rs6000_compare_op1);
11308	      break;
11309
11310	    default:
11311	      gcc_unreachable ();
11312	    }
11313	  emit_insn (cmp);
11314
11315	  /* OR them together.  */
11316	  or_result = gen_reg_rtx (CCFPmode);
11317	  cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11318					   compare_result2);
11319	  compare_result = or_result;
11320	  code = EQ;
11321	}
11322      else
11323	{
11324	  if (code == NE || code == LTGT)
11325	    code = NE;
11326	  else
11327	    code = EQ;
11328	}
11329
11330      emit_insn (cmp);
11331    }
11332  else
11333    {
11334      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11335	 CLOBBERs to match cmptf_internal2 pattern.  */
11336      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11337	  && GET_MODE (rs6000_compare_op0) == TFmode
11338	  && !TARGET_IEEEQUAD
11339	  && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11340	emit_insn (gen_rtx_PARALLEL (VOIDmode,
11341	  gen_rtvec (9,
11342		     gen_rtx_SET (VOIDmode,
11343				  compare_result,
11344				  gen_rtx_COMPARE (comp_mode,
11345						   rs6000_compare_op0,
11346						   rs6000_compare_op1)),
11347		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11348		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11349		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11350		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11351		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11352		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11353		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11354		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
11355      else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11356	       && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11357	{
11358	  rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11359	  comp_mode = CCEQmode;
11360	  compare_result = gen_reg_rtx (CCEQmode);
11361	  if (TARGET_64BIT)
11362	    emit_insn (gen_stack_protect_testdi (compare_result,
11363						 rs6000_compare_op0, op1));
11364	  else
11365	    emit_insn (gen_stack_protect_testsi (compare_result,
11366						 rs6000_compare_op0, op1));
11367	}
11368      else
11369	emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11370				gen_rtx_COMPARE (comp_mode,
11371						 rs6000_compare_op0,
11372						 rs6000_compare_op1)));
11373    }
11374
11375  /* Some kinds of FP comparisons need an OR operation;
11376     under flag_finite_math_only we don't bother.  */
11377  if (rs6000_compare_fp_p
11378      && !flag_finite_math_only
11379      && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
11380      && (code == LE || code == GE
11381	  || code == UNEQ || code == LTGT
11382	  || code == UNGT || code == UNLT))
11383    {
11384      enum rtx_code or1, or2;
11385      rtx or1_rtx, or2_rtx, compare2_rtx;
11386      rtx or_result = gen_reg_rtx (CCEQmode);
11387
11388      switch (code)
11389	{
11390	case LE: or1 = LT;  or2 = EQ;  break;
11391	case GE: or1 = GT;  or2 = EQ;  break;
11392	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
11393	case LTGT: or1 = LT;  or2 = GT;  break;
11394	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
11395	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
11396	default:  gcc_unreachable ();
11397	}
11398      validate_condition_mode (or1, comp_mode);
11399      validate_condition_mode (or2, comp_mode);
11400      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11401      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
11402      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11403				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11404				      const_true_rtx);
11405      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11406
11407      compare_result = or_result;
11408      code = EQ;
11409    }
11410
11411  validate_condition_mode (code, GET_MODE (compare_result));
11412
11413  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
11414}
11415
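/* For example, a DFmode LE comparison reaches the block above with
   or1 == LT and or2 == EQ: the two CR bits are IORed into a CCEQmode
   register (typically matched to a cror at the assembly level) and
   the final test becomes a simple EQ against that register.  */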
11416
11417/* Emit the RTL for an sCOND pattern.  */
11418
11419void
11420rs6000_emit_sCOND (enum rtx_code code, rtx result)
11421{
11422  rtx condition_rtx;
11423  enum machine_mode op_mode;
11424  enum rtx_code cond_code;
11425
11426  condition_rtx = rs6000_generate_compare (code);
11427  cond_code = GET_CODE (condition_rtx);
11428
11429  if (TARGET_E500 && rs6000_compare_fp_p
11430      && !TARGET_FPRS && TARGET_HARD_FLOAT)
11431    {
11432      rtx t;
11433
11434      PUT_MODE (condition_rtx, SImode);
11435      t = XEXP (condition_rtx, 0);
11436
11437      gcc_assert (cond_code == NE || cond_code == EQ);
11438
11439      if (cond_code == NE)
11440	emit_insn (gen_e500_flip_gt_bit (t, t));
11441
11442      emit_insn (gen_move_from_CR_gt_bit (result, t));
11443      return;
11444    }
11445
11446  if (cond_code == NE
11447      || cond_code == GE || cond_code == LE
11448      || cond_code == GEU || cond_code == LEU
11449      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11450    {
11451      rtx not_result = gen_reg_rtx (CCEQmode);
11452      rtx not_op, rev_cond_rtx;
11453      enum machine_mode cc_mode;
11454
11455      cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11456
11457      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
11458				     SImode, XEXP (condition_rtx, 0), const0_rtx);
11459      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11460      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11461      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11462    }
11463
11464  op_mode = GET_MODE (rs6000_compare_op0);
11465  if (op_mode == VOIDmode)
11466    op_mode = GET_MODE (rs6000_compare_op1);
11467
11468  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11469    {
11470      PUT_MODE (condition_rtx, DImode);
11471      convert_move (result, condition_rtx, 0);
11472    }
11473  else
11474    {
11475      PUT_MODE (condition_rtx, SImode);
11476      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11477    }
11478}
11479
11480/* Emit a branch of kind CODE to location LOC.  */
11481
11482void
11483rs6000_emit_cbranch (enum rtx_code code, rtx loc)
11484{
11485  rtx condition_rtx, loc_ref;
11486
11487  condition_rtx = rs6000_generate_compare (code);
11488  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11489  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11490			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11491						     loc_ref, pc_rtx)));
11492}
11493
11494/* Return the string to output a conditional branch to LABEL, which is
11495   the operand number of the label, or -1 if the branch is really a
11496   conditional return.
11497
11498   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
11499   condition code register and its mode specifies what kind of
11500   comparison we made.
11501
11502   REVERSED is nonzero if we should reverse the sense of the comparison.
11503
11504   INSN is the insn.  */
11505
11506char *
11507output_cbranch (rtx op, const char *label, int reversed, rtx insn)
11508{
11509  static char string[64];
11510  enum rtx_code code = GET_CODE (op);
11511  rtx cc_reg = XEXP (op, 0);
11512  enum machine_mode mode = GET_MODE (cc_reg);
11513  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
11514  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
11515  int really_reversed = reversed ^ need_longbranch;
11516  char *s = string;
11517  const char *ccode;
11518  const char *pred;
11519  rtx note;
11520
11521  validate_condition_mode (code, mode);
11522
11523  /* Work out which way this really branches.  We could always use
11524     reverse_condition_maybe_unordered here, but this way the
11525     resulting assembler is clearer.  */
11526  if (really_reversed)
11527    {
11528      /* Reversal of FP compares needs care -- an ordered compare
11529	 becomes an unordered compare and vice versa.  */
11530      if (mode == CCFPmode)
11531	code = reverse_condition_maybe_unordered (code);
11532      else
11533	code = reverse_condition (code);
11534    }
11535
11536  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
11537    {
11538      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11539	 to the GT bit.  */
11540      switch (code)
11541	{
11542	case EQ:
11543	  /* Opposite of GT.  */
11544	  code = GT;
11545	  break;
11546
11547	case NE:
11548	  code = UNLE;
11549	  break;
11550
11551	default:
11552	  gcc_unreachable ();
11553	}
11554    }
11555
11556  switch (code)
11557    {
11558      /* Not all of these are actually distinct opcodes, but
11559	 we distinguish them for clarity of the resulting assembler.  */
11560    case NE: case LTGT:
11561      ccode = "ne"; break;
11562    case EQ: case UNEQ:
11563      ccode = "eq"; break;
11564    case GE: case GEU:
11565      ccode = "ge"; break;
11566    case GT: case GTU: case UNGT:
11567      ccode = "gt"; break;
11568    case LE: case LEU:
11569      ccode = "le"; break;
11570    case LT: case LTU: case UNLT:
11571      ccode = "lt"; break;
11572    case UNORDERED: ccode = "un"; break;
11573    case ORDERED: ccode = "nu"; break;
11574    case UNGE: ccode = "nl"; break;
11575    case UNLE: ccode = "ng"; break;
11576    default:
11577      gcc_unreachable ();
11578    }
11579
11580  /* Maybe we have a guess as to how likely the branch is.
11581     The old mnemonics don't have a way to specify this information.  */
11582  pred = "";
11583  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11584  if (note != NULL_RTX)
11585    {
11586      /* PROB is the difference from 50%.  */
11587      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
11588
11589      /* Only hint for highly probable/improbable branches on newer
11590	 cpus as static prediction overrides processor dynamic
11591	 prediction.  For older cpus we may as well always hint, but
11592	 assume not taken for branches that are very close to 50% as a
11593	 mispredicted taken branch is more expensive than a
11594	 mispredicted not-taken branch.  */
11595      if (rs6000_always_hint
11596	  || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
11597	      && br_prob_note_reliable_p (note)))
11598	{
11599	  if (abs (prob) > REG_BR_PROB_BASE / 20
11600	      && ((prob > 0) ^ need_longbranch))
11601	    pred = "+";
11602	  else
11603	    pred = "-";
11604	}
11605    }
11606
11607  if (label == NULL)
11608    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
11609  else
11610    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
11611
11612  /* We need to escape any '%' characters in the reg_names string.
11613     Assume they'd only be the first character....  */
11614  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11615    *s++ = '%';
11616  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
11617
11618  if (label != NULL)
11619    {
11620      /* If the branch distance was too far, we may have to use an
11621	 unconditional branch to go the distance.  */
11622      if (need_longbranch)
11623	s += sprintf (s, ",$+8\n\tb %s", label);
11624      else
11625	s += sprintf (s, ",%s", label);
11626    }
11627
11628  return string;
11629}
11630
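/* A typical result, before assembler-dialect selection of the
   {old|new} mnemonics: "{beq|beq+} 0,L42" for a likely-taken branch
   on cr0, or "{bne|bne-} 0,$+8\n\tb L42" when the target is out of
   conditional-branch range and the sense is inverted around an
   unconditional b.  (Label and CR numbers are illustrative.)  */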
11631/* Return the string to flip the GT bit on a CR.  */
11632char *
11633output_e500_flip_gt_bit (rtx dst, rtx src)
11634{
11635  static char string[64];
11636  int a, b;
11637
11638  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11639	      && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
11640
11641  /* GT bit.  */
11642  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11643  b = 4 * (REGNO (src) - CR0_REGNO) + 1;
11644
11645  sprintf (string, "crnot %d,%d", a, b);
11646  return string;
11647}
11648
11649/* Return the insn index of the vector compare instruction for the given
11650   CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid
11651   insn is available.  */
11652
11653static int
11654get_vec_cmp_insn (enum rtx_code code,
11655		  enum machine_mode dest_mode,
11656		  enum machine_mode op_mode)
11657{
11658  if (!TARGET_ALTIVEC)
11659    return INSN_NOT_AVAILABLE;
11660
11661  switch (code)
11662    {
11663    case EQ:
11664      if (dest_mode == V16QImode && op_mode == V16QImode)
11665	return UNSPEC_VCMPEQUB;
11666      if (dest_mode == V8HImode && op_mode == V8HImode)
11667	return UNSPEC_VCMPEQUH;
11668      if (dest_mode == V4SImode && op_mode == V4SImode)
11669	return UNSPEC_VCMPEQUW;
11670      if (dest_mode == V4SImode && op_mode == V4SFmode)
11671	return UNSPEC_VCMPEQFP;
11672      break;
11673    case GE:
11674      if (dest_mode == V4SImode && op_mode == V4SFmode)
11675	return UNSPEC_VCMPGEFP;
      /* Integer GE has no direct AltiVec instruction; break so the
	 caller synthesizes GE as GT OR EQ rather than falling through
	 to the GT compares below.  */
      break;
11676    case GT:
11677      if (dest_mode == V16QImode && op_mode == V16QImode)
11678	return UNSPEC_VCMPGTSB;
11679      if (dest_mode == V8HImode && op_mode == V8HImode)
11680	return UNSPEC_VCMPGTSH;
11681      if (dest_mode == V4SImode && op_mode == V4SImode)
11682	return UNSPEC_VCMPGTSW;
11683      if (dest_mode == V4SImode && op_mode == V4SFmode)
11684	return UNSPEC_VCMPGTFP;
11685      break;
11686    case GTU:
11687      if (dest_mode == V16QImode && op_mode == V16QImode)
11688	return UNSPEC_VCMPGTUB;
11689      if (dest_mode == V8HImode && op_mode == V8HImode)
11690	return UNSPEC_VCMPGTUH;
11691      if (dest_mode == V4SImode && op_mode == V4SImode)
11692	return UNSPEC_VCMPGTUW;
11693      break;
11694    default:
11695      break;
11696    }
11697  return INSN_NOT_AVAILABLE;
11698}
11699
11700/* Emit vector compare for operands OP0 and OP1 using code RCODE.
11701   DMODE is the expected destination mode.  This is a recursive function.  */
11702
11703static rtx
11704rs6000_emit_vector_compare (enum rtx_code rcode,
11705			    rtx op0, rtx op1,
11706			    enum machine_mode dmode)
11707{
11708  int vec_cmp_insn;
11709  rtx mask;
11710  enum machine_mode dest_mode;
11711  enum machine_mode op_mode = GET_MODE (op1);
11712
11713  gcc_assert (TARGET_ALTIVEC);
11714  gcc_assert (GET_MODE (op0) == GET_MODE (op1));
11715
11716  /* Floating point vector compare instructions use a V4SImode destination.
11717     The result is moved to the appropriate mode later.  */
11718  if (dmode == V4SFmode)
11719    dest_mode = V4SImode;
11720  else
11721    dest_mode = dmode;
11722
11723  mask = gen_reg_rtx (dest_mode);
11724  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11725
11726  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11727    {
11728      bool swap_operands = false;
11729      bool try_again = false;
11730      switch (rcode)
11731	{
11732	case LT:
11733	  rcode = GT;
11734	  swap_operands = true;
11735	  try_again = true;
11736	  break;
11737	case LTU:
11738	  rcode = GTU;
11739	  swap_operands = true;
11740	  try_again = true;
11741	  break;
11742	case NE:
11743	  /* Treat A != B as ~(A==B).  */
11744	  {
11745	    enum insn_code nor_code;
11746	    rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11747						     dest_mode);
11748
11749	    nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
11750	    gcc_assert (nor_code != CODE_FOR_nothing);
11751	    emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11752
11753	    if (dmode != dest_mode)
11754	      {
11755		rtx temp = gen_reg_rtx (dest_mode);
11756		convert_move (temp, mask, 0);
11757		return temp;
11758	      }
11759	    return mask;
11760	  }
11761	  break;
11762	case GE:
11763	case GEU:
11764	case LE:
11765	case LEU:
11766	  /* Try GT/GTU/LT/LTU OR EQ */
11767	  {
11768	    rtx c_rtx, eq_rtx;
11769	    enum insn_code ior_code;
11770	    enum rtx_code new_code;
11771
11772	    switch (rcode)
11773	      {
11774	      case  GE:
11775		new_code = GT;
11776		break;
11777
11778	      case GEU:
11779		new_code = GTU;
11780		break;
11781
11782	      case LE:
11783		new_code = LT;
11784		break;
11785
11786	      case LEU:
11787		new_code = LTU;
11788		break;
11789
11790	      default:
11791		gcc_unreachable ();
11792	      }
11793
11794	    c_rtx = rs6000_emit_vector_compare (new_code,
11795						op0, op1, dest_mode);
11796	    eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11797						 dest_mode);
11798
11799	    ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
11800	    gcc_assert (ior_code != CODE_FOR_nothing);
11801	    emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11802	    if (dmode != dest_mode)
11803	      {
11804		rtx temp = gen_reg_rtx (dest_mode);
11805		convert_move (temp, mask, 0);
11806		return temp;
11807	      }
11808	    return mask;
11809	  }
11810	  break;
11811	default:
11812	  gcc_unreachable ();
11813	}
11814
11815      if (try_again)
11816	{
11817	  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11818	  /* You only get two chances.  */
11819	  gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
11820	}
11821
11822      if (swap_operands)
11823	{
11824	  rtx tmp;
11825	  tmp = op0;
11826	  op0 = op1;
11827	  op1 = tmp;
11828	}
11829    }
11830
11831  emit_insn (gen_rtx_SET (VOIDmode, mask,
11832			  gen_rtx_UNSPEC (dest_mode,
11833					  gen_rtvec (2, op0, op1),
11834					  vec_cmp_insn)));
11835  if (dmode != dest_mode)
11836    {
11837      rtx temp = gen_reg_rtx (dest_mode);
11838      convert_move (temp, mask, 0);
11839      return temp;
11840    }
11841  return mask;
11842}
11843
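/* For instance, a V4SImode LE is synthesized above from LT OR EQ: the
   LT half recurses and becomes a vcmpgtsw with swapped operands, the
   EQ half is a vcmpequw, and the ior_optab expansion combines the two
   masks with a vor.  */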
11844/* Return the vector select instruction for MODE.  Return
11845   INSN_NOT_AVAILABLE if no valid insn exists for the given mode.  */
11846
11847static int
11848get_vsel_insn (enum machine_mode mode)
11849{
11850  switch (mode)
11851    {
11852    case V4SImode:
11853      return UNSPEC_VSEL4SI;
11854      break;
11855    case V4SFmode:
11856      return UNSPEC_VSEL4SF;
11857      break;
11858    case V8HImode:
11859      return UNSPEC_VSEL8HI;
11860      break;
11861    case V16QImode:
11862      return UNSPEC_VSEL16QI;
11863      break;
11864    default:
11865      return INSN_NOT_AVAILABLE;
11866      break;
11867    }
11868  return INSN_NOT_AVAILABLE;
11869}
11870
11871/* Emit a vector select insn that sets destination DEST from
11872   operands OP1, OP2 and MASK.  */
11873
11874static void
11875rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11876{
11877  rtx t, temp;
11878  enum machine_mode dest_mode = GET_MODE (dest);
11879  int vsel_insn_index  = get_vsel_insn (GET_MODE (dest));
11880
11881  temp = gen_reg_rtx (dest_mode);
11882
11883  /* For each vector element, select op1 when mask is 1 otherwise
11884     select op2.  */
11885  t = gen_rtx_SET (VOIDmode, temp,
11886		   gen_rtx_UNSPEC (dest_mode,
11887				   gen_rtvec (3, op2, op1, mask),
11888				   vsel_insn_index));
11889  emit_insn (t);
11890  emit_move_insn (dest, temp);
11891  return;
11892}
11893
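/* The selection is bitwise: the emitted vsel computes
   dest = (mask & op1) | (~mask & op2), which is why the comparison
   masks of all-ones/all-zeros elements select whole elements for any
   element width.  */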
11894/* Emit vector conditional expression.
11895   DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11896   CC_OP0 and CC_OP1 are the two operands of the relational operation COND.  */
11897
11898int
11899rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11900			      rtx cond, rtx cc_op0, rtx cc_op1)
11901{
11902  enum machine_mode dest_mode = GET_MODE (dest);
11903  enum rtx_code rcode = GET_CODE (cond);
11904  rtx mask;
11905
11906  if (!TARGET_ALTIVEC)
11907    return 0;
11908
11909  /* Get the vector mask for the given relational operations.  */
11910  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11911
11912  rs6000_emit_vector_select (dest, op1, op2, mask);
11913
11914  return 1;
11915}
11916
11917/* Emit a conditional move: move TRUE_COND to DEST if OP applied to
11918   the operands of the last comparison is nonzero/true, FALSE_COND if
11919   it is zero/false.  Return 0 if the hardware has no such operation.  */
11920
11921int
11922rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11923{
11924  enum rtx_code code = GET_CODE (op);
11925  rtx op0 = rs6000_compare_op0;
11926  rtx op1 = rs6000_compare_op1;
11927  REAL_VALUE_TYPE c1;
11928  enum machine_mode compare_mode = GET_MODE (op0);
11929  enum machine_mode result_mode = GET_MODE (dest);
11930  rtx temp;
11931  bool is_against_zero;
11932
11933  /* These modes should always match.  */
11934  if (GET_MODE (op1) != compare_mode
11935      /* In the isel case however, we can use a compare immediate, so
11936	 op1 may be a small constant.  */
11937      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
11938    return 0;
11939  if (GET_MODE (true_cond) != result_mode)
11940    return 0;
11941  if (GET_MODE (false_cond) != result_mode)
11942    return 0;
11943
11944  /* First, work out if the hardware can do this at all, or
11945     if it's too slow....  */
11946  if (! rs6000_compare_fp_p)
11947    {
11948      if (TARGET_ISEL)
11949	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11950      return 0;
11951    }
11952  else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11953	   && SCALAR_FLOAT_MODE_P (compare_mode))
11954    return 0;
11955
11956  is_against_zero = op1 == CONST0_RTX (compare_mode);
11957
11958  /* A floating-point subtract might overflow, underflow, or produce
11959     an inexact result, thus changing the floating-point flags, so it
11960     can't be generated if we care about that.  It's safe if one side
11961     of the construct is zero, since then no subtract will be
11962     generated.  */
11963  if (SCALAR_FLOAT_MODE_P (compare_mode)
11964      && flag_trapping_math && ! is_against_zero)
11965    return 0;
11966
11967  /* Eliminate half of the comparisons by switching operands, this
11968     makes the remaining code simpler.  */
11969  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
11970      || code == LTGT || code == LT || code == UNLE)
11971    {
11972      code = reverse_condition_maybe_unordered (code);
11973      temp = true_cond;
11974      true_cond = false_cond;
11975      false_cond = temp;
11976    }
11977
11978  /* UNEQ and LTGT take four instructions for a comparison with zero;
11979     it'll probably be faster to use a branch here too.  */
11980  if (code == UNEQ && HONOR_NANS (compare_mode))
11981    return 0;
11982
11983  if (GET_CODE (op1) == CONST_DOUBLE)
11984    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
11985
11986  /* We're going to try to implement comparisons by performing
11987     a subtract, then comparing against zero.  Unfortunately,
11988     Inf - Inf is NaN which is not zero, so if we don't know that
11989     the operand is finite and the comparison would treat EQ
11990     differently from UNORDERED, we can't do it.  */
11991  if (HONOR_INFINITIES (compare_mode)
11992      && code != GT && code != UNGE
11993      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
11994      /* Constructs of the form (a OP b ? a : b) are safe.  */
11995      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
11996	  || (! rtx_equal_p (op0, true_cond)
11997	      && ! rtx_equal_p (op1, true_cond))))
11998    return 0;
11999
12000  /* At this point we know we can use fsel.  */
12001
12002  /* Reduce the comparison to a comparison against zero.  */
12003  if (! is_against_zero)
12004    {
12005      temp = gen_reg_rtx (compare_mode);
12006      emit_insn (gen_rtx_SET (VOIDmode, temp,
12007			      gen_rtx_MINUS (compare_mode, op0, op1)));
12008      op0 = temp;
12009      op1 = CONST0_RTX (compare_mode);
12010    }
12011
12012  /* If we don't care about NaNs we can reduce some of the comparisons
12013     down to faster ones.  */
12014  if (! HONOR_NANS (compare_mode))
12015    switch (code)
12016      {
12017      case GT:
12018	code = LE;
12019	temp = true_cond;
12020	true_cond = false_cond;
12021	false_cond = temp;
12022	break;
12023      case UNGE:
12024	code = GE;
12025	break;
12026      case UNEQ:
12027	code = EQ;
12028	break;
12029      default:
12030	break;
12031      }
12032
12033  /* Now, reduce everything down to a GE.  */
12034  switch (code)
12035    {
12036    case GE:
12037      break;
12038
12039    case LE:
12040      temp = gen_reg_rtx (compare_mode);
12041      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12042      op0 = temp;
12043      break;
12044
12045    case ORDERED:
12046      temp = gen_reg_rtx (compare_mode);
12047      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
12048      op0 = temp;
12049      break;
12050
12051    case EQ:
12052      temp = gen_reg_rtx (compare_mode);
12053      emit_insn (gen_rtx_SET (VOIDmode, temp,
12054			      gen_rtx_NEG (compare_mode,
12055					   gen_rtx_ABS (compare_mode, op0))));
12056      op0 = temp;
12057      break;
12058
12059    case UNGE:
12060      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
12061      temp = gen_reg_rtx (result_mode);
12062      emit_insn (gen_rtx_SET (VOIDmode, temp,
12063			      gen_rtx_IF_THEN_ELSE (result_mode,
12064						    gen_rtx_GE (VOIDmode,
12065								op0, op1),
12066						    true_cond, false_cond)));
12067      false_cond = true_cond;
12068      true_cond = temp;
12069
12070      temp = gen_reg_rtx (compare_mode);
12071      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12072      op0 = temp;
12073      break;
12074
12075    case GT:
12076      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
12077      temp = gen_reg_rtx (result_mode);
12078      emit_insn (gen_rtx_SET (VOIDmode, temp,
12079			      gen_rtx_IF_THEN_ELSE (result_mode,
12080						    gen_rtx_GE (VOIDmode,
12081								op0, op1),
12082						    true_cond, false_cond)));
12083      true_cond = false_cond;
12084      false_cond = temp;
12085
12086      temp = gen_reg_rtx (compare_mode);
12087      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
12088      op0 = temp;
12089      break;
12090
12091    default:
12092      gcc_unreachable ();
12093    }
12094
12095  emit_insn (gen_rtx_SET (VOIDmode, dest,
12096			  gen_rtx_IF_THEN_ELSE (result_mode,
12097						gen_rtx_GE (VOIDmode,
12098							    op0, op1),
12099						true_cond, false_cond)));
12100  return 1;
12101}
12102
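/* A sketch of what the code above produces for "d = (a > b) ? x : y"
   in DFmode when NaNs are honoured (and assuming -fno-trapping-math,
   so the subtract is permitted): the comparison is reduced to a
   subtract t = a - b, then two fsel instructions, one selecting on
   t >= 0 and one on -t >= 0, per the GT case in the switch above.  */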
12103/* Same as above, but for ints (isel).  */
12104
12105static int
12106rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
12107{
12108  rtx condition_rtx, cr;
12109
12110  /* All isel implementations thus far are 32-bit.  */
12111  if (GET_MODE (rs6000_compare_op0) != SImode)
12112    return 0;
12113
12114  /* We still have to do the compare, because isel doesn't do a
12115     compare; it just looks at the CRx bits set by a previous compare
12116     instruction.  */
12117  condition_rtx = rs6000_generate_compare (GET_CODE (op));
12118  cr = XEXP (condition_rtx, 0);
12119
12120  if (GET_MODE (cr) == CCmode)
12121    emit_insn (gen_isel_signed (dest, condition_rtx,
12122				true_cond, false_cond, cr));
12123  else
12124    emit_insn (gen_isel_unsigned (dest, condition_rtx,
12125				  true_cond, false_cond, cr));
12126
12127  return 1;
12128}
12129
12130const char *
12131output_isel (rtx *operands)
12132{
12133  enum rtx_code code;
12134
12135  code = GET_CODE (operands[1]);
12136  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12137    {
12138      PUT_CODE (operands[1], reverse_condition (code));
12139      return "isel %0,%3,%2,%j1";
12140    }
12141  else
12142    return "isel %0,%2,%3,%j1";
12143}
12144
12145void
12146rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
12147{
12148  enum machine_mode mode = GET_MODE (op0);
12149  enum rtx_code c;
12150  rtx target;
12151
12152  if (code == SMAX || code == SMIN)
12153    c = GE;
12154  else
12155    c = GEU;
12156
12157  if (code == SMAX || code == UMAX)
12158    target = emit_conditional_move (dest, c, op0, op1, mode,
12159				    op0, op1, mode, 0);
12160  else
12161    target = emit_conditional_move (dest, c, op0, op1, mode,
12162				    op1, op0, mode, 0);
12163  gcc_assert (target);
12164  if (target != dest)
12165    emit_move_insn (dest, target);
12166}
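
/* Schematically (illustrative only), the transformation above is:

       SMAX:  dest = (op0 >= op1) ? op0 : op1;   signed GE compare
       SMIN:  dest = (op0 >= op1) ? op1 : op0;
       UMAX/UMIN:  the same selections with an unsigned (GEU) compare.  */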
12167
12168/* Emit instructions to perform a load-reserved/store-conditional operation.
12169   The operation performed is an atomic
12170       (set M (CODE:MODE M OP)).
12171   If not NULL, BEFORE is atomically set to M before the operation, and
12172   AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
12173   If SYNC_P then a memory barrier is emitted before the operation.
12174   Either OP or M may be wrapped in a NOT operation.  */
12175
12176void
12177rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
12178		  rtx m, rtx op, rtx before_param, rtx after_param,
12179		  bool sync_p)
12180{
12181  enum machine_mode used_mode;
12182  rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
12183  rtx used_m;
12184  rtvec vec;
12185  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12186  rtx shift = NULL_RTX;
12187
12188  if (sync_p)
12189    emit_insn (gen_memory_barrier ());
12190
12191  if (GET_CODE (m) == NOT)
12192    used_m = XEXP (m, 0);
12193  else
12194    used_m = m;
12195
12196  /* If this is smaller than SImode, we'll have to use SImode with
12197     adjustments.  */
12198  if (mode == QImode || mode == HImode)
12199    {
12200      rtx newop, oldop;
12201
12202      if (MEM_ALIGN (used_m) >= 32)
12203	{
12204	  int ishift = 0;
12205	  if (BYTES_BIG_ENDIAN)
12206	    ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
12207
12208	  shift = GEN_INT (ishift);
12209	}
12210      else
12211	{
12212	  rtx addrSI, aligned_addr;
12213	  int shift_mask = mode == QImode ? 0x18 : 0x10;
12214
12215	  addrSI = force_reg (SImode, gen_lowpart_common (SImode,
12216							  XEXP (used_m, 0)));
12217	  shift = gen_reg_rtx (SImode);
12218
12219	  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
12220				 GEN_INT (shift_mask)));
12221	  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
12222
12223	  aligned_addr = expand_binop (Pmode, and_optab,
12224				       XEXP (used_m, 0),
12225				       GEN_INT (-4), NULL_RTX,
12226				       1, OPTAB_LIB_WIDEN);
12227	  used_m = change_address (used_m, SImode, aligned_addr);
12228	  set_mem_align (used_m, 32);
12229	  /* It's safe to keep the old alias set of USED_M, because
12230	     the operation is atomic and only affects the original
12231	     USED_M.  */
12232	  if (GET_CODE (m) == NOT)
12233	    m = gen_rtx_NOT (SImode, used_m);
12234	  else
12235	    m = used_m;
12236	}
12237
12238      if (GET_CODE (op) == NOT)
12239	{
12240	  oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
12241	  oldop = gen_rtx_NOT (SImode, oldop);
12242	}
12243      else
12244	oldop = lowpart_subreg (SImode, op, mode);
12245
12246      switch (code)
12247	{
12248	case IOR:
12249	case XOR:
12250	  newop = expand_binop (SImode, and_optab,
12251				oldop, GEN_INT (imask), NULL_RTX,
12252				1, OPTAB_LIB_WIDEN);
12253	  emit_insn (gen_ashlsi3 (newop, newop, shift));
12254	  break;
12255
12256	case AND:
12257	  newop = expand_binop (SImode, ior_optab,
12258				oldop, GEN_INT (~imask), NULL_RTX,
12259				1, OPTAB_LIB_WIDEN);
12260	  emit_insn (gen_rotlsi3 (newop, newop, shift));
12261	  break;
12262
12263	case PLUS:
12264	case MINUS:
12265	  {
12266	    rtx mask;
12267
12268	    newop = expand_binop (SImode, and_optab,
12269				  oldop, GEN_INT (imask), NULL_RTX,
12270				  1, OPTAB_LIB_WIDEN);
12271	    emit_insn (gen_ashlsi3 (newop, newop, shift));
12272
12273	    mask = gen_reg_rtx (SImode);
12274	    emit_move_insn (mask, GEN_INT (imask));
12275	    emit_insn (gen_ashlsi3 (mask, mask, shift));
12276
12277	    if (code == PLUS)
12278	      newop = gen_rtx_PLUS (SImode, m, newop);
12279	    else
12280	      newop = gen_rtx_MINUS (SImode, m, newop);
12281	    newop = gen_rtx_AND (SImode, newop, mask);
12282	    newop = gen_rtx_IOR (SImode, newop,
12283				 gen_rtx_AND (SImode,
12284					      gen_rtx_NOT (SImode, mask),
12285					      m));
12286	    break;
12287	  }
12288
12289	default:
12290	  gcc_unreachable ();
12291	}
12292
12293      if (GET_CODE (m) == NOT)
12294	{
12295	  rtx mask, xorm;
12296
12297	  mask = gen_reg_rtx (SImode);
12298	  emit_move_insn (mask, GEN_INT (imask));
12299	  emit_insn (gen_ashlsi3 (mask, mask, shift));
12300
12301	  xorm = gen_rtx_XOR (SImode, used_m, mask);
12302	  /* Depending on the value of 'op', the XOR or the operation might
12303	     be able to be simplified away.  */
12304	  newop = simplify_gen_binary (code, SImode, xorm, newop);
12305	}
12306      op = newop;
12307      used_mode = SImode;
12308      before = gen_reg_rtx (used_mode);
12309      after = gen_reg_rtx (used_mode);
12310    }
12311  else
12312    {
12313      used_mode = mode;
12314      before = before_param;
12315      after = after_param;
12316
12317      if (before == NULL_RTX)
12318	before = gen_reg_rtx (used_mode);
12319      if (after == NULL_RTX)
12320	after = gen_reg_rtx (used_mode);
12321    }
12322
12323  if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12324      && used_mode != mode)
12325    the_op = op;  /* Computed above.  */
12326  else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12327    the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12328  else
12329    the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12330
12331  set_after = gen_rtx_SET (VOIDmode, after, the_op);
12332  set_before = gen_rtx_SET (VOIDmode, before, used_m);
12333  set_atomic = gen_rtx_SET (VOIDmode, used_m,
12334			    gen_rtx_UNSPEC (used_mode,
12335					    gen_rtvec (1, the_op),
12336					    UNSPEC_SYNC_OP));
12337  cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12338
12339  if ((code == PLUS || code == MINUS) && used_mode != mode)
12340    vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12341		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12342  else
12343    vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12344  emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12345
12346  /* Shift and mask the return values properly.  */
12347  if (used_mode != mode && before_param)
12348    {
12349      emit_insn (gen_lshrsi3 (before, before, shift));
12350      convert_move (before_param, before, 1);
12351    }
12352
12353  if (used_mode != mode && after_param)
12354    {
12355      emit_insn (gen_lshrsi3 (after, after, shift));
12356      convert_move (after_param, after, 1);
12357    }
12358
12359  /* The previous sequence will end with a branch that's dependent on
12360     the conditional store, so placing an isync will ensure that no
12361     other instructions (especially load or store instructions)
12362     can start before the atomic operation completes.  */
12363  if (sync_p)
12364    emit_insn (gen_isync ());
12365}
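
/* Worked example (illustrative) of the subword positioning used by the
   rlwinm/xorsi3 pair above, and again in rs6000_expand_compare_and_swapqhi
   below.  For a QImode access at address A (big-endian), the shift is
   ((A << 3) & 0x18) ^ 0x18, i.e. the byte's bit offset from the least
   significant end of the aligned word:

       A & 3	shift	position within word
	 0	 24	most significant byte
	 1	 16
	 2	  8
	 3	  0	least significant byte

   HImode is analogous with mask 0x10 and shifts of 16 or 0.  */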
12366
12367/* A subroutine of the atomic operation splitters.  Jump to LABEL if
12368   COND is true.  Mark the jump as unlikely to be taken.  */
12369
12370static void
12371emit_unlikely_jump (rtx cond, rtx label)
12372{
12373  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12374  rtx x;
12375
12376  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12377  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12378  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12379}
12380
12381/* A subroutine of the atomic operation splitters.  Emit a load-locked
12382   instruction in MODE.  */
12383
12384static void
12385emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12386{
12387  rtx (*fn) (rtx, rtx) = NULL;
12388  if (mode == SImode)
12389    fn = gen_load_locked_si;
12390  else if (mode == DImode)
12391    fn = gen_load_locked_di;
12392  emit_insn (fn (reg, mem));
12393}
12394
12395/* A subroutine of the atomic operation splitters.  Emit a store-conditional
12396   instruction in MODE.  */
12397
12398static void
12399emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12400{
12401  rtx (*fn) (rtx, rtx, rtx) = NULL;
12402  if (mode == SImode)
12403    fn = gen_store_conditional_si;
12404  else if (mode == DImode)
12405    fn = gen_store_conditional_di;
12406
12407  /* Emit sync before stwcx. to address PPC405 Erratum 77.  */
12408  if (PPC405_ERRATUM77)
12409    emit_insn (gen_memory_barrier ());
12410
12411  emit_insn (fn (res, mem, val));
12412}
12413
12414/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
12415   to perform.  MEM is the memory on which to operate.  VAL is the second
12416   operand of the binary operator.  BEFORE and AFTER are optional locations to
12417   return the value of MEM either before or after the operation.  SCRATCH is
12418   a scratch register.  */
12419
12420void
12421rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12422                       rtx before, rtx after, rtx scratch)
12423			rtx before, rtx after, rtx scratch)
12424  enum machine_mode mode = GET_MODE (mem);
12425  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12426
12427  emit_insn (gen_memory_barrier ());
12428
12429  label = gen_label_rtx ();
12430  emit_label (label);
12431  label = gen_rtx_LABEL_REF (VOIDmode, label);
12432
12433  if (before == NULL_RTX)
12434    before = scratch;
12435  emit_load_locked (mode, before, mem);
12436
12437  if (code == NOT)
12438    x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12439  else if (code == AND)
12440    x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12441  else
12442    x = gen_rtx_fmt_ee (code, mode, before, val);
12443
12444  if (after != NULL_RTX)
12445    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12446  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12447
12448  emit_store_conditional (mode, cond, mem, scratch);
12449
12450  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12451  emit_unlikely_jump (x, label);
12452
12453  emit_insn (gen_isync ());
12454}
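
/* Schematically (illustrative; SImode with CODE == PLUS), the splitter
   above emits:

	sync
   1:	lwarx	rBEFORE,0,rMEM		# load and reserve
	add	rSCRATCH,rBEFORE,rVAL	# the requested operation
	stwcx.	rSCRATCH,0,rMEM		# store iff reservation still held
	bne-	1b			# reservation lost: retry
	isync				# fence subsequent accesses

   stwcx. sets CR0, which the bne- tests; the loop repeats until the
   store-conditional succeeds.  */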
12455
12456/* Expand an atomic compare and swap operation.  MEM is the memory on which
12457   to operate.  OLDVAL is the old value to be compared.  NEWVAL is the new
12458   value to be stored.  SCRATCH is a scratch GPR.  */
12459
12460void
12461rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12462			       rtx scratch)
12463{
12464  enum machine_mode mode = GET_MODE (mem);
12465  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12466
12467  emit_insn (gen_memory_barrier ());
12468
12469  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12470  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12471  emit_label (XEXP (label1, 0));
12472
12473  emit_load_locked (mode, retval, mem);
12474
12475  x = gen_rtx_COMPARE (CCmode, retval, oldval);
12476  emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12477
12478  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12479  emit_unlikely_jump (x, label2);
12480
12481  emit_move_insn (scratch, newval);
12482  emit_store_conditional (mode, cond, mem, scratch);
12483
12484  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12485  emit_unlikely_jump (x, label1);
12486
12487  emit_insn (gen_isync ());
12488  emit_label (XEXP (label2, 0));
12489}
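
/* In C-like pseudocode (illustrative), the loop above is:

     retry:
       retval = load_locked (mem);
       if (retval != oldval)
	 goto done;
       if (!store_conditional (mem, newval))
	 goto retry;
       isync;
     done:

   Note that the isync is skipped entirely when the comparison fails.  */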
12490
12491/* Expand an atomic test and set operation.  MEM is the memory on which
12492   to operate.  VAL is the value set.  SCRATCH is a scratch GPR.  */
12493
12494void
12495rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12496{
12497  enum machine_mode mode = GET_MODE (mem);
12498  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12499
12500  emit_insn (gen_memory_barrier ());
12501
12502  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12503  emit_label (XEXP (label, 0));
12504
12505  emit_load_locked (mode, retval, mem);
12506  emit_move_insn (scratch, val);
12507  emit_store_conditional (mode, cond, mem, scratch);
12508
12509  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12510  emit_unlikely_jump (x, label);
12511
12512  emit_insn (gen_isync ());
12513}
12514
12515void
12516rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
12517{
12518  enum machine_mode mode = GET_MODE (mem);
12519  rtx addrSI, align, wdst, shift, mask;
12520  HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
12521  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12522
12523  /* Shift amount for subword relative to aligned word.  */
12524  addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
12525  shift = gen_reg_rtx (SImode);
12526  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
12527			 GEN_INT (shift_mask)));
12528  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
12529
12530  /* Shift and mask old value into position within word.  */
12531  oldval = convert_modes (SImode, mode, oldval, 1);
12532  oldval = expand_binop (SImode, and_optab,
12533			 oldval, GEN_INT (imask), NULL_RTX,
12534			 1, OPTAB_LIB_WIDEN);
12535  emit_insn (gen_ashlsi3 (oldval, oldval, shift));
12536
12537  /* Shift and mask new value into position within word.  */
12538  newval = convert_modes (SImode, mode, newval, 1);
12539  newval = expand_binop (SImode, and_optab,
12540			 newval, GEN_INT (imask), NULL_RTX,
12541			 1, OPTAB_LIB_WIDEN);
12542  emit_insn (gen_ashlsi3 (newval, newval, shift));
12543
12544  /* Mask for insertion.  */
12545  mask = gen_reg_rtx (SImode);
12546  emit_move_insn (mask, GEN_INT (imask));
12547  emit_insn (gen_ashlsi3 (mask, mask, shift));
12548
12549  /* Address of aligned word containing subword.  */
12550  align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
12551			NULL_RTX, 1, OPTAB_LIB_WIDEN);
12552  mem = change_address (mem, SImode, align);
12553  set_mem_align (mem, 32);
12554  MEM_VOLATILE_P (mem) = 1;
12555
12556  wdst = gen_reg_rtx (SImode);
12557  emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
12558						    oldval, newval, mem));
12559
12560  emit_move_insn (dst, gen_lowpart (mode, wdst));
12561}
12562
12563void
12564rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
12565				  rtx oldval, rtx newval, rtx mem,
12566				  rtx scratch)
12567{
12568  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12569
12570  emit_insn (gen_memory_barrier ());
12571  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12572  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12573  emit_label (XEXP (label1, 0));
12574
12575  emit_load_locked (SImode, scratch, mem);
12576
12577  /* Mask subword within loaded value for comparison with oldval.
12578     Use UNSPEC_AND to avoid a clobber.  */
12579  emit_insn (gen_rtx_SET (SImode, dest,
12580			  gen_rtx_UNSPEC (SImode,
12581					  gen_rtvec (2, scratch, mask),
12582					  UNSPEC_AND)));
12583
12584  x = gen_rtx_COMPARE (CCmode, dest, oldval);
12585  emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12586
12587  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12588  emit_unlikely_jump (x, label2);
12589
12590  /* Clear subword within loaded value for insertion of new value.  */
12591  emit_insn (gen_rtx_SET (SImode, scratch,
12592			  gen_rtx_AND (SImode,
12593				       gen_rtx_NOT (SImode, mask), scratch)));
12594  emit_insn (gen_iorsi3 (scratch, scratch, newval));
12595  emit_store_conditional (SImode, cond, mem, scratch);
12596
12597  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12598  emit_unlikely_jump (x, label1);
12599
12600  emit_insn (gen_isync ());
12601  emit_label (XEXP (label2, 0));
12602}
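
/* The word rebuilt above is, schematically:

       new_word = (old_word & ~mask) | newval;

   where MASK, OLDVAL and NEWVAL were all pre-shifted into the subword's
   lane by rs6000_expand_compare_and_swapqhi.  */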
12603
12604
12605/* Emit instructions to move SRC to DST.  Called by splitters for
12606   multi-register moves.  It will emit at most one instruction for
12607   each register that is accessed; that is, it won't emit li/lis pairs
12608   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
12609   register.  */
12610
12611void
12612rs6000_split_multireg_move (rtx dst, rtx src)
12613{
12614  /* The register number of the first register being moved.  */
12615  int reg;
12616  /* The mode that is to be moved.  */
12617  enum machine_mode mode;
12618  /* The mode that the move is being done in, and its size.  */
12619  enum machine_mode reg_mode;
12620  int reg_mode_size;
12621  /* The number of registers that will be moved.  */
12622  int nregs;
12623
12624  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12625  mode = GET_MODE (dst);
12626  nregs = hard_regno_nregs[reg][mode];
12627  if (FP_REGNO_P (reg))
12628    reg_mode = DFmode;
12629  else if (ALTIVEC_REGNO_P (reg))
12630    reg_mode = V16QImode;
12631  else if (TARGET_E500_DOUBLE && mode == TFmode)
12632    reg_mode = DFmode;
12633  else
12634    reg_mode = word_mode;
12635  reg_mode_size = GET_MODE_SIZE (reg_mode);
12636
12637  gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
12638
12639  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12640    {
12641      /* Move register range backwards, if we might have destructive
12642	 overlap.  */
12643      int i;
12644      for (i = nregs - 1; i >= 0; i--)
12645	emit_insn (gen_rtx_SET (VOIDmode,
12646				simplify_gen_subreg (reg_mode, dst, mode,
12647						     i * reg_mode_size),
12648				simplify_gen_subreg (reg_mode, src, mode,
12649						     i * reg_mode_size)));
12650    }
12651  else
12652    {
12653      int i;
12654      int j = -1;
12655      bool used_update = false;
12656
12657      if (MEM_P (src) && INT_REGNO_P (reg))
12658	{
12659	  rtx breg;
12660
12661	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
12662	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
12663	    {
12664	      rtx delta_rtx;
12665	      breg = XEXP (XEXP (src, 0), 0);
12666	      delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12667			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12668			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
12669	      emit_insn (TARGET_32BIT
12670			 ? gen_addsi3 (breg, breg, delta_rtx)
12671			 : gen_adddi3 (breg, breg, delta_rtx));
12672	      src = replace_equiv_address (src, breg);
12673	    }
12674	  else if (! rs6000_offsettable_memref_p (src))
12675	    {
12676	      rtx basereg;
12677	      basereg = gen_rtx_REG (Pmode, reg);
12678	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
12679	      src = replace_equiv_address (src, basereg);
12680	    }
12681
12682	  breg = XEXP (src, 0);
12683	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12684	    breg = XEXP (breg, 0);
12685
12686	  /* If the base register we are using to address memory is
12687	     also a destination reg, then change that register last.  */
12688	  if (REG_P (breg)
12689	      && REGNO (breg) >= REGNO (dst)
12690	      && REGNO (breg) < REGNO (dst) + nregs)
12691	    j = REGNO (breg) - REGNO (dst);
12692	}
12693
12694      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
12695	{
12696	  rtx breg;
12697
12698	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12699	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
12700	    {
12701	      rtx delta_rtx;
12702	      breg = XEXP (XEXP (dst, 0), 0);
12703	      delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12704			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12705			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
12706
12707	      /* We have to update the breg before doing the store.
12708		 Use store with update, if available.  */
12709
12710	      if (TARGET_UPDATE)
12711		{
12712		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
12713		  emit_insn (TARGET_32BIT
12714			     ? (TARGET_POWERPC64
12715				? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12716				: gen_movsi_update (breg, breg, delta_rtx, nsrc))
12717			     : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
12718		  used_update = true;
12719		}
12720	      else
12721		emit_insn (TARGET_32BIT
12722			   ? gen_addsi3 (breg, breg, delta_rtx)
12723			   : gen_adddi3 (breg, breg, delta_rtx));
12724	      dst = replace_equiv_address (dst, breg);
12725	    }
12726	  else
12727	    gcc_assert (rs6000_offsettable_memref_p (dst));
12728	}
12729
12730      for (i = 0; i < nregs; i++)
12731	{
12732	  /* Calculate index to next subword.  */
12733	  ++j;
12734	  if (j == nregs)
12735	    j = 0;
12736
12737	  /* If compiler already emitted move of first word by
12738	     store with update, no need to do anything.  */
12739	  if (j == 0 && used_update)
12740	    continue;
12741
12742	  emit_insn (gen_rtx_SET (VOIDmode,
12743				  simplify_gen_subreg (reg_mode, dst, mode,
12744						       j * reg_mode_size),
12745				  simplify_gen_subreg (reg_mode, src, mode,
12746						       j * reg_mode_size)));
12747	}
12748    }
12749}
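
/* Illustrative example of the direction choice above: moving a
   four-word value from {r5..r8} to {r6..r9} in ascending order would
   overwrite r6 (a source word) before it is read.  Descending order
   -- r8->r9 first, r5->r6 last -- is safe, the same reasoning as
   memmove's copy-direction choice.  */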
12750
12751
12752/* This page contains routines that are used to determine what the
12753   function prologue and epilogue code will do and write them out.  */
12754
12755/* Return the first fixed-point register that is required to be
12756   saved, or 32 if none.  */
12757
12758int
12759first_reg_to_save (void)
12760{
12761  int first_reg;
12762
12763  /* Find lowest numbered live register.  */
12764  for (first_reg = 13; first_reg <= 31; first_reg++)
12765    if (regs_ever_live[first_reg]
12766	&& (! call_used_regs[first_reg]
12767	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
12768		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12769		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12770		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
12771      break;
12772
12773#if TARGET_MACHO
12774  if (flag_pic
12775      && current_function_uses_pic_offset_table
12776      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
12777    return RS6000_PIC_OFFSET_TABLE_REGNUM;
12778#endif
12779
12780  return first_reg;
12781}
12782
12783/* Similar, for FP regs.  */
12784
12785int
12786first_fp_reg_to_save (void)
12787{
12788  int first_reg;
12789
12790  /* Find lowest numbered live register.  */
12791  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12792    if (regs_ever_live[first_reg])
12793      break;
12794
12795  return first_reg;
12796}
12797
12798/* Similar, for AltiVec regs.  */
12799
12800static int
12801first_altivec_reg_to_save (void)
12802{
12803  int i;
12804
12805  /* Stack frame remains as is unless we are in AltiVec ABI.  */
12806  if (! TARGET_ALTIVEC_ABI)
12807    return LAST_ALTIVEC_REGNO + 1;
12808
12809  /* On Darwin, the unwind routines are compiled without
12810     TARGET_ALTIVEC, and use save_world to save/restore the
12811     altivec registers when necessary.  */
12812  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
12813      && ! TARGET_ALTIVEC)
12814    return FIRST_ALTIVEC_REGNO + 20;
12815
12816  /* Find lowest numbered live register.  */
12817  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12818    if (regs_ever_live[i])
12819      break;
12820
12821  return i;
12822}
12823
12824/* Return a 32-bit mask of the AltiVec registers we need to set in
12825   VRSAVE.  Bit n of the return value is 1 if Vn is live; bits are
12826   numbered from the MSB, so V0 corresponds to the most significant bit.  */
12827
12828static unsigned int
12829compute_vrsave_mask (void)
12830{
12831  unsigned int i, mask = 0;
12832
12833  /* On Darwin, the unwind routines are compiled without
12834     TARGET_ALTIVEC, and use save_world to save/restore the
12835     call-saved altivec registers when necessary.  */
12836  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
12837      && ! TARGET_ALTIVEC)
12838    mask |= 0xFFF;
12839
12840  /* First, find out if we use _any_ altivec registers.  */
12841  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12842    if (regs_ever_live[i])
12843      mask |= ALTIVEC_REG_BIT (i);
12844
12845  if (mask == 0)
12846    return mask;
12847
12848  /* Next, remove the argument registers from the set.  These must
12849     be in the VRSAVE mask set by the caller, so we don't need to add
12850     them in again.  More importantly, the mask we compute here is
12851     used to generate CLOBBERs in the set_vrsave insn, and we do not
12852     wish the argument registers to die.  */
12853  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
12854    mask &= ~ALTIVEC_REG_BIT (i);
12855
12856  /* Similarly, remove the return value from the set.  */
12857  {
12858    bool yes = false;
12859    diddle_return_value (is_altivec_return_reg, &yes);
12860    if (yes)
12861      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12862  }
12863
12864  return mask;
12865}
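
/* For reference (an assumption for illustration; the real macro lives
   in rs6000.h): ALTIVEC_REG_BIT is expected to behave like

       0x80000000 >> (REGNO - FIRST_ALTIVEC_REGNO)

   so that V0 occupies the most significant bit, matching the VRSAVE
   numbering described above.  */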
12866
12867/* For a very restricted set of circumstances, we can cut down the
12868   size of prologues/epilogues by calling our own save/restore-the-world
12869   routines.  */
12870
12871static void
12872compute_save_world_info (rs6000_stack_t *info_ptr)
12873{
12874  info_ptr->world_save_p = 1;
12875  info_ptr->world_save_p
12876    = (WORLD_SAVE_P (info_ptr)
12877       && DEFAULT_ABI == ABI_DARWIN
12878       && ! (current_function_calls_setjmp && flag_exceptions)
12879       && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12880       && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12881       && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12882       && info_ptr->cr_save_p);
12883
12884  /* This will not work in conjunction with sibcalls.  Make sure there
12885     are none.  (This check is expensive, but seldom executed.) */
12886  if (WORLD_SAVE_P (info_ptr))
12887    {
12888      rtx insn;
12889      for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
12890	if (GET_CODE (insn) == CALL_INSN
12891	    && SIBLING_CALL_P (insn))
12892	  {
12893	    info_ptr->world_save_p = 0;
12894	    break;
12895	  }
12896    }
12897
12898  if (WORLD_SAVE_P (info_ptr))
12899    {
12900      /* Even if we're not touching VRsave, make sure there's room on the
12901	 stack for it, if it looks like we're calling SAVE_WORLD, which
12902	 will attempt to save it. */
12903      info_ptr->vrsave_size  = 4;
12904
12905      /* "Save" the VRsave register too if we're saving the world.  */
12906      if (info_ptr->vrsave_mask == 0)
12907	info_ptr->vrsave_mask = compute_vrsave_mask ();
12908
12909      /* Because the Darwin register save/restore routines only handle
12910	 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12911	 check.  */
12912      gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12913		  && (info_ptr->first_altivec_reg_save
12914		      >= FIRST_SAVED_ALTIVEC_REGNO));
12915    }
12916  return;
12917}
12918
12919
12920static void
12921is_altivec_return_reg (rtx reg, void *xyes)
12922{
12923  bool *yes = (bool *) xyes;
12924  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12925    *yes = true;
12926}
12927
12928
12929/* Calculate the stack information for the current function.  This is
12930   complicated by having two separate calling sequences, the AIX calling
12931   sequence and the V.4 calling sequence.
12932
12933   AIX (and Darwin/Mac OS X) stack frames look like:
12934							  32-bit  64-bit
12935	SP---->	+---------------------------------------+
12936		| back chain to caller			| 0	  0
12937		+---------------------------------------+
12938		| saved CR				| 4       8 (8-11)
12939		+---------------------------------------+
12940		| saved LR				| 8       16
12941		+---------------------------------------+
12942		| reserved for compilers		| 12      24
12943		+---------------------------------------+
12944		| reserved for binders			| 16      32
12945		+---------------------------------------+
12946		| saved TOC pointer			| 20      40
12947		+---------------------------------------+
12948		| Parameter save area (P)		| 24      48
12949		+---------------------------------------+
12950		| Alloca space (A)			| 24+P    etc.
12951		+---------------------------------------+
12952		| Local variable space (L)		| 24+P+A
12953		+---------------------------------------+
12954		| Float/int conversion temporary (X)	| 24+P+A+L
12955		+---------------------------------------+
12956		| Save area for AltiVec registers (W)	| 24+P+A+L+X
12957		+---------------------------------------+
12958		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
12959		+---------------------------------------+
12960		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
12961		+---------------------------------------+
12962		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
12963		+---------------------------------------+
12964		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
12965		+---------------------------------------+
12966	old SP->| back chain to caller's caller		|
12967		+---------------------------------------+
12968
12969   The required alignment for AIX configurations is two words (i.e., 8
12970   or 16 bytes).
12971
12972
12973   V.4 stack frames look like:
12974
12975	SP---->	+---------------------------------------+
12976		| back chain to caller			| 0
12977		+---------------------------------------+
12978		| caller's saved LR			| 4
12979		+---------------------------------------+
12980		| Parameter save area (P)		| 8
12981		+---------------------------------------+
12982		| Alloca space (A)			| 8+P
12983		+---------------------------------------+
12984		| Varargs save area (V)			| 8+P+A
12985		+---------------------------------------+
12986		| Local variable space (L)		| 8+P+A+V
12987		+---------------------------------------+
12988		| Float/int conversion temporary (X)	| 8+P+A+V+L
12989		+---------------------------------------+
12990		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
12991		+---------------------------------------+
12992		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
12993		+---------------------------------------+
12994		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
12995		+---------------------------------------+
12996		| SPE: area for 64-bit GP registers	|
12997		+---------------------------------------+
12998		| SPE alignment padding			|
12999		+---------------------------------------+
13000		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
13001		+---------------------------------------+
13002		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
13003		+---------------------------------------+
13004		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
13005		+---------------------------------------+
13006	old SP->| back chain to caller's caller		|
13007		+---------------------------------------+
13008
13009   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13010   given.  (But note below and in sysv4.h that we require only 8 and
13011   may round up the size of our stack frame anyway.  The historical
13012   reason is early versions of powerpc-linux which didn't properly
13013   align the stack at program startup.  A happy side-effect is that
13014   -mno-eabi libraries can be used with -meabi programs.)
13015
13016   The EABI configuration defaults to the V.4 layout.  However,
13017   the stack alignment requirements may differ.  If -mno-eabi is not
13018   given, the required stack alignment is 8 bytes; if -mno-eabi is
13019   given, the required alignment is 16 bytes.  (But see V.4 comment
13020   above.)  */
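
/* Worked example (illustrative, 32-bit AIX): with a 32-byte parameter
   save area (P = 32), no alloca (A = 0), 16 bytes of locals (L = 16),
   and no conversion or vector areas (X = W = Y = Z = 0), the GP save
   area (G) starts at offset 24 + 32 + 0 + 16 = 72 from SP, before the
   total frame is rounded up to the two-word ABI boundary.  */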
13021
13022#ifndef ABI_STACK_BOUNDARY
13023#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13024#endif
13025
13026static rs6000_stack_t *
13027rs6000_stack_info (void)
13028{
13029  static rs6000_stack_t info;
13030  rs6000_stack_t *info_ptr = &info;
13031  int reg_size = TARGET_32BIT ? 4 : 8;
13032  int ehrd_size;
13033  int save_align;
13034  HOST_WIDE_INT non_fixed_size;
13035
13036  memset (&info, 0, sizeof (info));
13037
13038  if (TARGET_SPE)
13039    {
13040      /* Cache value so we don't rescan instruction chain over and over.  */
13041      if (cfun->machine->insn_chain_scanned_p == 0)
13042	cfun->machine->insn_chain_scanned_p
13043	  = spe_func_has_64bit_regs_p () + 1;
13044      info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
13045    }
13046
13047  /* Select which calling sequence.  */
13048  info_ptr->abi = DEFAULT_ABI;
13049
13050  /* Calculate which registers need to be saved & save area size.  */
13051  info_ptr->first_gp_reg_save = first_reg_to_save ();
13052  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
13053     even if it currently looks like we won't.  */
13054  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
13055       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
13056       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
13057      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
13058    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
13059  else
13060    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
13061
13062  /* For the SPE, we have an additional upper 32-bits on each GPR.
13063     Ideally we should save the entire 64-bits only when the upper
13064     half is used in SIMD instructions.  Since we only record
13065     registers live (not the size they are used in), this proves
13066     difficult because we'd have to traverse the instruction chain at
13067     the right time, taking reload into account.  This is a real pain,
13068     so we opt to save all the GPRs in 64 bits whenever even one
13069     register is used in 64-bit mode.  Otherwise, all the registers
13070     in the frame are saved in 32 bits.
13071
13072     So, when we save all GPRs (except the SP) in 64 bits, the
13073     traditional GP save area will be empty.  */
13074  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
13075    info_ptr->gp_size = 0;
13076
13077  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13078  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13079
13080  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13081  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13082				 - info_ptr->first_altivec_reg_save);
13083
13084  /* Does this function call anything?  */
13085  info_ptr->calls_p = (! current_function_is_leaf
13086		       || cfun->machine->ra_needs_full_frame);
13087
13088  /* Determine if we need to save the link register.  */
13089  if ((DEFAULT_ABI == ABI_AIX
13090       && current_function_profile
13091       && !TARGET_PROFILE_KERNEL)
13092#ifdef TARGET_RELOCATABLE
13093      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13094#endif
13095      || (info_ptr->first_fp_reg_save != 64
13096	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
13097      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
13098      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
13099      || info_ptr->calls_p
13100      || rs6000_ra_ever_killed ())
13101    {
13102      info_ptr->lr_save_p = 1;
13103      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
13104    }
13105
13106  /* Determine if we need to save the condition code registers.  */
13107  if (regs_ever_live[CR2_REGNO]
13108      || regs_ever_live[CR3_REGNO]
13109      || regs_ever_live[CR4_REGNO])
13110    {
13111      info_ptr->cr_save_p = 1;
13112      if (DEFAULT_ABI == ABI_V4)
13113	info_ptr->cr_size = reg_size;
13114    }
13115
13116  /* If the current function calls __builtin_eh_return, then we need
13117     to allocate stack space for registers that will hold data for
13118     the exception handler.  */
13119  if (current_function_calls_eh_return)
13120    {
13121      unsigned int i;
13122      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
13123	continue;
13124
13125      /* SPE saves EH registers in 64-bits.  */
13126      ehrd_size = i * (TARGET_SPE_ABI
13127		       && info_ptr->spe_64bit_regs_used != 0
13128		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
13129    }
13130  else
13131    ehrd_size = 0;
13132
13133  /* Determine various sizes.  */
13134  info_ptr->reg_size     = reg_size;
13135  info_ptr->fixed_size   = RS6000_SAVE_AREA;
13136  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
13137  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
13138					 TARGET_ALTIVEC ? 16 : 8);
13139  if (FRAME_GROWS_DOWNWARD)
13140    info_ptr->vars_size
13141      += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
13142		       + info_ptr->parm_size,
13143		       ABI_STACK_BOUNDARY / BITS_PER_UNIT)
13144	 - (info_ptr->fixed_size + info_ptr->vars_size
13145	    + info_ptr->parm_size);
13146
13147  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
13148    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
13149  else
13150    info_ptr->spe_gp_size = 0;
13151
13152  if (TARGET_ALTIVEC_ABI)
13153    info_ptr->vrsave_mask = compute_vrsave_mask ();
13154  else
13155    info_ptr->vrsave_mask = 0;
13156
13157  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
13158    info_ptr->vrsave_size  = 4;
13159  else
13160    info_ptr->vrsave_size  = 0;
13161
13162  compute_save_world_info (info_ptr);
13163
13164  /* Calculate the offsets.  */
13165  switch (DEFAULT_ABI)
13166    {
13167    case ABI_NONE:
13168    default:
13169      gcc_unreachable ();
13170
13171    case ABI_AIX:
13172    case ABI_DARWIN:
13173      info_ptr->fp_save_offset   = - info_ptr->fp_size;
13174      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
13175
13176      if (TARGET_ALTIVEC_ABI)
13177	{
13178	  info_ptr->vrsave_save_offset
13179	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
13180
13181	  /* Align stack so vector save area is on a quadword boundary.
13182	     The padding goes above the vectors.  */
13183	  if (info_ptr->altivec_size != 0)
13184	    info_ptr->altivec_padding_size
13185	      = info_ptr->vrsave_save_offset & 0xF;
13186	  else
13187	    info_ptr->altivec_padding_size = 0;
13188
13189	  info_ptr->altivec_save_offset
13190	    = info_ptr->vrsave_save_offset
13191	    - info_ptr->altivec_padding_size
13192	    - info_ptr->altivec_size;
13193	  gcc_assert (info_ptr->altivec_size == 0
13194		      || info_ptr->altivec_save_offset % 16 == 0);
13195
13196	  /* Adjust for AltiVec case.  */
13197	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
13198	}
13199      else
13200	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
13201      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
13202      info_ptr->lr_save_offset   = 2*reg_size;
13203      break;
13204
13205    case ABI_V4:
13206      info_ptr->fp_save_offset   = - info_ptr->fp_size;
13207      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
13208      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
13209
13210      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
13211	{
13212	  /* Align stack so SPE GPR save area is aligned on a
13213	     double-word boundary.  */
13214	  if (info_ptr->spe_gp_size != 0)
13215	    info_ptr->spe_padding_size
13216	      = 8 - (-info_ptr->cr_save_offset % 8);
13217	  else
13218	    info_ptr->spe_padding_size = 0;
13219
13220	  info_ptr->spe_gp_save_offset
13221	    = info_ptr->cr_save_offset
13222	    - info_ptr->spe_padding_size
13223	    - info_ptr->spe_gp_size;
13224
13225	  /* Adjust for SPE case.  */
13226	  info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
13227	}
13228      else if (TARGET_ALTIVEC_ABI)
13229	{
13230	  info_ptr->vrsave_save_offset
13231	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
13232
13233	  /* Align stack so vector save area is on a quadword boundary.  */
13234	  if (info_ptr->altivec_size != 0)
13235	    info_ptr->altivec_padding_size
13236	      = 16 - (-info_ptr->vrsave_save_offset % 16);
13237	  else
13238	    info_ptr->altivec_padding_size = 0;
13239
13240	  info_ptr->altivec_save_offset
13241	    = info_ptr->vrsave_save_offset
13242	    - info_ptr->altivec_padding_size
13243	    - info_ptr->altivec_size;
13244
13245	  /* Adjust for AltiVec case.  */
13246	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
13247	}
13248      else
13249	info_ptr->ehrd_offset    = info_ptr->cr_save_offset;
13250      info_ptr->ehrd_offset      -= ehrd_size;
13251      info_ptr->lr_save_offset   = reg_size;
13252      break;
13253    }
13254
13255  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
13256  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
13257					 + info_ptr->gp_size
13258					 + info_ptr->altivec_size
13259					 + info_ptr->altivec_padding_size
13260					 + info_ptr->spe_gp_size
13261					 + info_ptr->spe_padding_size
13262					 + ehrd_size
13263					 + info_ptr->cr_size
13264					 + info_ptr->vrsave_size,
13265					 save_align);
13266
13267  non_fixed_size	 = (info_ptr->vars_size
13268			    + info_ptr->parm_size
13269			    + info_ptr->save_size);
13270
13271  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
13272				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);
13273
13274  /* Determine if we need to allocate any stack frame:
13275
13276     For AIX we need to push the stack if a frame pointer is needed
13277     (because the stack might be dynamically adjusted), if we are
13278     debugging, if we make calls, or if the sum of fp_save, gp_save,
13279     and local variables are more than the space needed to save all
13280     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
13281     + 18*8 = 288 (GPR13 reserved).
13282
13283     For V.4 we don't have the stack cushion that AIX uses, but assume
13284     that the debugger can handle stackless frames.  */
13285
13286  if (info_ptr->calls_p)
13287    info_ptr->push_p = 1;
13288
13289  else if (DEFAULT_ABI == ABI_V4)
13290    info_ptr->push_p = non_fixed_size != 0;
13291
13292  else if (frame_pointer_needed)
13293    info_ptr->push_p = 1;
13294
13295  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
13296    info_ptr->push_p = 1;
13297
13298  else
13299    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
13300
13301  /* Zero offsets if we're not saving those registers.  */
13302  if (info_ptr->fp_size == 0)
13303    info_ptr->fp_save_offset = 0;
13304
13305  if (info_ptr->gp_size == 0)
13306    info_ptr->gp_save_offset = 0;
13307
13308  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
13309    info_ptr->altivec_save_offset = 0;
13310
13311  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
13312    info_ptr->vrsave_save_offset = 0;
13313
13314  if (! TARGET_SPE_ABI
13315      || info_ptr->spe_64bit_regs_used == 0
13316      || info_ptr->spe_gp_size == 0)
13317    info_ptr->spe_gp_save_offset = 0;
13318
13319  if (! info_ptr->lr_save_p)
13320    info_ptr->lr_save_offset = 0;
13321
13322  if (! info_ptr->cr_save_p)
13323    info_ptr->cr_save_offset = 0;
13324
13325  return info_ptr;
13326}
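
/* For reference (an assumption for illustration; RS6000_ALIGN itself
   is defined in rs6000.h): the rounding used throughout
   rs6000_stack_info is the usual power-of-two round-up, schematically

       RS6000_ALIGN (n, align)  ==  (n + align - 1) & ~(align - 1)

   for ALIGN a power of two.  */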
13327
13328/* Return true if the current function uses any GPRs in 64-bit SIMD
13329   mode.  */
13330
13331static bool
13332spe_func_has_64bit_regs_p (void)
13333{
13334  rtx insns, insn;
13335
13336  /* Functions that save and restore all the call-saved registers will
13337     need to save/restore the registers in 64-bits.  */
13338  if (current_function_calls_eh_return
13339      || current_function_calls_setjmp
13340      || current_function_has_nonlocal_goto)
13341    return true;
13342
13343  insns = get_insns ();
13344
13345  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13346    {
13347      if (INSN_P (insn))
13348	{
13349	  rtx i;
13350
13351	  /* FIXME: This should be implemented with attributes...
13352
13353	         (set_attr "spe64" "true")....then,
13354	         if (get_spe64(insn)) return true;
13355
13356	     It's the only reliable way to do the stuff below.  */
13357
13358	  i = PATTERN (insn);
13359	  if (GET_CODE (i) == SET)
13360	    {
13361	      enum machine_mode mode = GET_MODE (SET_SRC (i));
13362
13363	      if (SPE_VECTOR_MODE (mode))
13364		return true;
13365	      if (TARGET_E500_DOUBLE && mode == DFmode)
13366		return true;
13367	    }
13368	}
13369    }
13370
13371  return false;
13372}
13373
13374static void
13375debug_stack_info (rs6000_stack_t *info)
13376{
13377  const char *abi_string;
13378
13379  if (! info)
13380    info = rs6000_stack_info ();
13381
13382  fprintf (stderr, "\nStack information for function %s:\n",
13383	   ((current_function_decl && DECL_NAME (current_function_decl))
13384	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13385	    : "<unknown>"));
13386
13387  switch (info->abi)
13388    {
13389    default:		 abi_string = "Unknown";	break;
13390    case ABI_NONE:	 abi_string = "NONE";		break;
13391    case ABI_AIX:	 abi_string = "AIX";		break;
13392    case ABI_DARWIN:	 abi_string = "Darwin";		break;
13393    case ABI_V4:	 abi_string = "V.4";		break;
13394    }
13395
13396  fprintf (stderr, "\tABI                 = %5s\n", abi_string);
13397
13398  if (TARGET_ALTIVEC_ABI)
13399    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13400
13401  if (TARGET_SPE_ABI)
13402    fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13403
13404  if (info->first_gp_reg_save != 32)
13405    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);
13406
13407  if (info->first_fp_reg_save != 64)
13408    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);
13409
13410  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13411    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13412	     info->first_altivec_reg_save);
13413
13414  if (info->lr_save_p)
13415    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);
13416
13417  if (info->cr_save_p)
13418    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);
13419
13420  if (info->vrsave_mask)
13421    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);
13422
13423  if (info->push_p)
13424    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);
13425
13426  if (info->calls_p)
13427    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);
13428
13429  if (info->gp_save_offset)
13430    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);
13431
13432  if (info->fp_save_offset)
13433    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);
13434
13435  if (info->altivec_save_offset)
13436    fprintf (stderr, "\taltivec_save_offset = %5d\n",
13437	     info->altivec_save_offset);
13438
13439  if (info->spe_gp_save_offset)
13440    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
13441	     info->spe_gp_save_offset);
13442
13443  if (info->vrsave_save_offset)
13444    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
13445	     info->vrsave_save_offset);
13446
13447  if (info->lr_save_offset)
13448    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);
13449
13450  if (info->cr_save_offset)
13451    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);
13452
13453  if (info->varargs_save_offset)
13454    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13455
13456  if (info->total_size)
13457    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
13458	     info->total_size);
13459
13460  if (info->vars_size)
13461    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
13462	     info->vars_size);
13463
13464  if (info->parm_size)
13465    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);
13466
13467  if (info->fixed_size)
13468    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);
13469
13470  if (info->gp_size)
13471    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);
13472
13473  if (info->spe_gp_size)
13474    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);
13475
13476  if (info->fp_size)
13477    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);
13478
13479  if (info->altivec_size)
13480    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);
13481
13482  if (info->vrsave_size)
13483    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);
13484
13485  if (info->altivec_padding_size)
13486    fprintf (stderr, "\taltivec_padding_size= %5d\n",
13487	     info->altivec_padding_size);
13488
13489  if (info->spe_padding_size)
13490    fprintf (stderr, "\tspe_padding_size    = %5d\n",
13491	     info->spe_padding_size);
13492
13493  if (info->cr_size)
13494    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);
13495
13496  if (info->save_size)
13497    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);
13498
13499  if (info->reg_size != 4)
13500    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);
13501
13502  fprintf (stderr, "\n");
13503}
13504
13505rtx
13506rs6000_return_addr (int count, rtx frame)
13507{
13508  /* Currently we don't optimize very well between prolog and body
13509     code and for PIC code the code can be actually quite bad, so
13510     don't try to be too clever here.  */
13511  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
13512    {
13513      cfun->machine->ra_needs_full_frame = 1;
13514
13515      return
13516	gen_rtx_MEM
13517	  (Pmode,
13518	   memory_address
13519	   (Pmode,
13520	    plus_constant (copy_to_reg
13521			   (gen_rtx_MEM (Pmode,
13522					 memory_address (Pmode, frame))),
13523			   RETURN_ADDRESS_OFFSET)));
13524    }
13525
13526  cfun->machine->ra_need_lr = 1;
13527  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
13528}
13529
13530/* Say whether a function is a candidate for sibcall handling or not.
13531   We do not allow indirect calls to be optimized into sibling calls.
13532   Also, we can't do it if there are any vector parameters; there's
13533   nowhere to put the VRsave code so that it works; note that functions with
13534   vector parameters are required to have a prototype, so the argument
13535   type info must be available here.  (The tail recursion case can work
13536   with vector parameters, but there's no way to distinguish here.) */
13537static bool
13538rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
13539{
13540  tree type;
13541  if (decl)
13542    {
13543      if (TARGET_ALTIVEC_VRSAVE)
13544	{
13545	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
13546	       type; type = TREE_CHAIN (type))
13547	    {
13548	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
13549		return false;
13550	    }
13551	}
13552      if (DEFAULT_ABI == ABI_DARWIN
13553	  || ((*targetm.binds_local_p) (decl)
13554	      && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
13555	{
13556	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
13557
13558	  if (!lookup_attribute ("longcall", attr_list)
13559	      || lookup_attribute ("shortcall", attr_list))
13560	    return true;
13561	}
13562    }
13563  return false;
13564}
13565
13566/* Return NULL if INSN is valid within a low-overhead loop.
13567   Otherwise return why doloop cannot be applied.
13568   PowerPC uses the COUNT register for branch on table instructions.  */
13569
13570static const char *
13571rs6000_invalid_within_doloop (rtx insn)
13572{
13573  if (CALL_P (insn))
13574    return "Function call in the loop.";
13575
13576  if (JUMP_P (insn)
13577      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13578	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
13579    return "Computed branch in the loop.";
13580
13581  return NULL;
13582}
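
/* Example (illustrative): a counted loop normally becomes an
   mtctr/bdnz pair.  A call in the body may clobber CTR (it is
   volatile across calls), and a jump-table branch uses mtctr/bctr
   itself, so both are rejected above.  */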
13583
13584static int
13585rs6000_ra_ever_killed (void)
13586{
13587  rtx top;
13588  rtx reg;
13589  rtx insn;
13590
13591  if (current_function_is_thunk)
13592    return 0;
13593
13594  /* regs_ever_live has LR marked as used if any sibcalls are present,
13595     but this should not force saving and restoring in the
13596     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
13597     clobbers LR, so that is inappropriate.  */
13598
13599  /* Also, the prologue can generate a store into LR that
13600     doesn't really count, like this:
13601
13602        move LR->R0
13603        bcl to set PIC register
13604        move LR->R31
13605        move R0->LR
13606
13607     When we're called from the epilogue, we need to avoid counting
13608     this as a store.  */
13609
13610  push_topmost_sequence ();
13611  top = get_insns ();
13612  pop_topmost_sequence ();
13613  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13614
13615  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13616    {
13617      if (INSN_P (insn))
13618	{
13619	  if (CALL_P (insn))
13620	    {
13621	      if (!SIBLING_CALL_P (insn))
13622		return 1;
13623	    }
13624	  else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
13625	    return 1;
13626	  else if (set_of (reg, insn) != NULL_RTX
13627		   && !prologue_epilogue_contains (insn))
13628	    return 1;
13629    	}
13630    }
13631  return 0;
13632}
13633
13634/* Add a REG_MAYBE_DEAD note to the insn.  */
13635static void
13636rs6000_maybe_dead (rtx insn)
13637{
13638  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13639					const0_rtx,
13640					REG_NOTES (insn));
13641}
13642
13643/* Emit instructions needed to load the TOC register.
13644   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
13645   a constant pool; or for SVR4 -fpic.  */
13646
13647void
13648rs6000_emit_load_toc_table (int fromprolog)
13649{
13650  rtx dest, insn;
13651  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
13652
13653  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
13654    {
13655      char buf[30];
13656      rtx lab, tmp1, tmp2, got, tempLR;
13657
13658      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13659      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13660      if (flag_pic == 2)
13661	got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13662      else
13663	got = rs6000_got_sym ();
13664      tmp1 = tmp2 = dest;
13665      if (!fromprolog)
13666	{
13667	  tmp1 = gen_reg_rtx (Pmode);
13668	  tmp2 = gen_reg_rtx (Pmode);
13669	}
13670      tempLR = (fromprolog
13671		? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13672		: gen_reg_rtx (Pmode));
13673      insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13674      if (fromprolog)
13675	rs6000_maybe_dead (insn);
13676      insn = emit_move_insn (tmp1, tempLR);
13677      if (fromprolog)
13678	rs6000_maybe_dead (insn);
13679      insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13680      if (fromprolog)
13681	rs6000_maybe_dead (insn);
13682      insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13683      if (fromprolog)
13684	rs6000_maybe_dead (insn);
13685    }
13686  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13687    {
13688      rtx tempLR = (fromprolog
13689		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13690		    : gen_reg_rtx (Pmode));
13691
13692      insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
13693      if (fromprolog)
13694	rs6000_maybe_dead (insn);
13695      insn = emit_move_insn (dest, tempLR);
13696      if (fromprolog)
13697	rs6000_maybe_dead (insn);
13698    }
13699  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13700    {
13701      char buf[30];
13702      rtx tempLR = (fromprolog
13703		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13704		    : gen_reg_rtx (Pmode));
13705      rtx temp0 = (fromprolog
13706		   ? gen_rtx_REG (Pmode, 0)
13707		   : gen_reg_rtx (Pmode));
13708
13709      if (fromprolog)
13710	{
13711	  rtx symF, symL;
13712
13713	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13714	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13715
13716	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13717	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13718
13719	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13720							       symF)));
13721	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13722	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13723							       symL,
13724							       symF)));
13725	}
13726      else
13727	{
13728	  rtx tocsym;
13729
13730	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13731	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
13732	  emit_move_insn (dest, tempLR);
13733	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
13734	}
13735      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13736      if (fromprolog)
13737	rs6000_maybe_dead (insn);
13738    }
13739  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13740    {
13741      /* This is for AIX code running in non-PIC ELF32.  */
13742      char buf[30];
13743      rtx realsym;
13744      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13745      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13746
13747      insn = emit_insn (gen_elf_high (dest, realsym));
13748      if (fromprolog)
13749	rs6000_maybe_dead (insn);
13750      insn = emit_insn (gen_elf_low (dest, dest, realsym));
13751      if (fromprolog)
13752	rs6000_maybe_dead (insn);
13753    }
13754  else
13755    {
13756      gcc_assert (DEFAULT_ABI == ABI_AIX);
13757
13758      if (TARGET_32BIT)
13759	insn = emit_insn (gen_load_toc_aix_si (dest));
13760      else
13761	insn = emit_insn (gen_load_toc_aix_di (dest));
13762      if (fromprolog)
13763	rs6000_maybe_dead (insn);
13764    }
13765}
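
/* For illustration: with -msecure-plt PIC (the first arm above), the
   RTL emitted corresponds roughly to this sequence -- a sketch,
   assuming the usual expansion of the load_toc_v4_PIC_* patterns in
   rs6000.md:

	bcl 20,31,.LCF0		# load_toc_v4_PIC_1: point LR at .LCF0
   .LCF0:
	mflr TMP1		# copy the label's address out of LR
	addis TMP2,TMP1,(GOT-.LCF0)@ha	# load_toc_v4_PIC_3b
	addi  DEST,TMP2,(GOT-.LCF0)@l	# load_toc_v4_PIC_3c

   where GOT is the TOC base label for flag_pic == 2 and
   _GLOBAL_OFFSET_TABLE_ otherwise.  */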
13766
13767/* Emit instructions to restore the link register after determining where
13768   its value has been stored.  */
13769
13770void
13771rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13772{
13773  rs6000_stack_t *info = rs6000_stack_info ();
13774  rtx operands[2];
13775
13776  operands[0] = source;
13777  operands[1] = scratch;
13778
13779  if (info->lr_save_p)
13780    {
13781      rtx frame_rtx = stack_pointer_rtx;
13782      HOST_WIDE_INT sp_offset = 0;
13783      rtx tmp;
13784
13785      if (frame_pointer_needed
13786	  || current_function_calls_alloca
13787	  || info->total_size > 32767)
13788	{
13789	  tmp = gen_frame_mem (Pmode, frame_rtx);
13790	  emit_move_insn (operands[1], tmp);
13791	  frame_rtx = operands[1];
13792	}
13793      else if (info->push_p)
13794	sp_offset = info->total_size;
13795
13796      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13797      tmp = gen_frame_mem (Pmode, tmp);
13798      emit_move_insn (tmp, operands[0]);
13799    }
13800  else
13801    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13802}
13803
13804static GTY(()) int set = -1;
13805
13806int
13807get_TOC_alias_set (void)
13808{
13809  if (set == -1)
13810    set = new_alias_set ();
13811  return set;
13812}
13813
13814/* This returns nonzero if the current function uses the TOC.  This is
13815   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13816   is generated by the ABI_V4 load_toc_* patterns.  */
13817#if TARGET_ELF
13818static int
13819uses_TOC (void)
13820{
13821  rtx insn;
13822
13823  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13824    if (INSN_P (insn))
13825      {
13826	rtx pat = PATTERN (insn);
13827	int i;
13828
13829	if (GET_CODE (pat) == PARALLEL)
13830	  for (i = 0; i < XVECLEN (pat, 0); i++)
13831	    {
13832	      rtx sub = XVECEXP (pat, 0, i);
13833	      if (GET_CODE (sub) == USE)
13834		{
13835		  sub = XEXP (sub, 0);
13836		  if (GET_CODE (sub) == UNSPEC
13837		      && XINT (sub, 1) == UNSPEC_TOC)
13838		    return 1;
13839		}
13840	    }
13841      }
13842  return 0;
13843}
13844#endif
13845
13846rtx
13847create_TOC_reference (rtx symbol)
13848{
13849  if (no_new_pseudos)
13850    regs_ever_live[TOC_REGISTER] = 1;
13851  return gen_rtx_PLUS (Pmode,
13852	   gen_rtx_REG (Pmode, TOC_REGISTER),
13853	     gen_rtx_CONST (Pmode,
13854	       gen_rtx_MINUS (Pmode, symbol,
13855		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
13856}
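
/* For illustration, the RTX built above has the shape

     (plus (reg TOC_REGISTER)
	   (const (minus (symbol_ref SYMBOL) (symbol_ref TOC-LABEL))))

   i.e. the TOC register plus SYMBOL's offset from the TOC base label,
   which the assembler sees as a SYMBOL-TOCLABEL(rTOC) reference.  A
   sketch only: TOC-LABEL stands in for toc_label_name here.  */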
13857
13858/* If _Unwind_* has been called from within the same module,
13859   the TOC register is not guaranteed to be saved to 40(1) on function
13860   entry.  Save it there in that case.  */
13861
13862void
13863rs6000_aix_emit_builtin_unwind_init (void)
13864{
13865  rtx mem;
13866  rtx stack_top = gen_reg_rtx (Pmode);
13867  rtx opcode_addr = gen_reg_rtx (Pmode);
13868  rtx opcode = gen_reg_rtx (SImode);
13869  rtx tocompare = gen_reg_rtx (SImode);
13870  rtx no_toc_save_needed = gen_label_rtx ();
13871
13872  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
13873  emit_move_insn (stack_top, mem);
13874
13875  mem = gen_frame_mem (Pmode,
13876		       gen_rtx_PLUS (Pmode, stack_top,
13877				     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
13878  emit_move_insn (opcode_addr, mem);
13879  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13880  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
13881					   : 0xE8410028, SImode));
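  /* The two constants above appear to be the encodings of the insn
     that reloads the TOC after a call: 0x80410014 is "lwz r2,20(r1)"
     and 0xE8410028 is "ld r2,40(r1)".  If the word at the return
     address is already that reload, the TOC was saved and no store is
     needed; otherwise r2 is stored into the TOC save slot below.  */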
13882
13883  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
13884			   SImode, NULL_RTX, NULL_RTX,
13885			   no_toc_save_needed);
13886
13887  mem = gen_frame_mem (Pmode,
13888		       gen_rtx_PLUS (Pmode, stack_top,
13889				     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
13890  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13891  emit_label (no_toc_save_needed);
13892}
13893
13894/* This ties together stack memory (MEM with an alias set of frame_alias_set)
13895   and the change to the stack pointer.  */
13896
13897static void
13898rs6000_emit_stack_tie (void)
13899{
13900  rtx mem = gen_frame_mem (BLKmode,
13901			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13902
13903  emit_insn (gen_stack_tie (mem));
13904}
13905
13906/* Emit the correct code for allocating stack space, as insns.
13907   If COPY_R12, make sure a copy of the old stack pointer is left in r12.
13908   The generated code may use hard register 0 as a temporary.  */
13909
13910static void
13911rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
13912{
13913  rtx insn;
13914  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13915  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
13916  rtx todec = gen_int_mode (-size, Pmode);
13917
13918  if (INTVAL (todec) != -size)
13919    {
13920      warning (0, "stack frame too large");
13921      emit_insn (gen_trap ());
13922      return;
13923    }
13924
13925  if (current_function_limit_stack)
13926    {
13927      if (REG_P (stack_limit_rtx)
13928	  && REGNO (stack_limit_rtx) > 1
13929	  && REGNO (stack_limit_rtx) <= 31)
13930	{
13931	  emit_insn (TARGET_32BIT
13932		     ? gen_addsi3 (tmp_reg,
13933				   stack_limit_rtx,
13934				   GEN_INT (size))
13935		     : gen_adddi3 (tmp_reg,
13936				   stack_limit_rtx,
13937				   GEN_INT (size)));
13938
13939	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13940				    const0_rtx));
13941	}
13942      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
13943	       && TARGET_32BIT
13944	       && DEFAULT_ABI == ABI_V4)
13945	{
13946	  rtx toload = gen_rtx_CONST (VOIDmode,
13947				      gen_rtx_PLUS (Pmode,
13948						    stack_limit_rtx,
13949						    GEN_INT (size)));
13950
13951	  emit_insn (gen_elf_high (tmp_reg, toload));
13952	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13953	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13954				    const0_rtx));
13955	}
13956      else
13957	warning (0, "stack limit expression is not supported");
13958    }
13959
13960  if (copy_r12 || ! TARGET_UPDATE)
13961    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13962
13963  if (TARGET_UPDATE)
13964    {
13965      if (size > 32767)
13966	{
13967	  /* Need a note here so that try_split doesn't get confused.  */
13968	  if (get_last_insn () == NULL_RTX)
13969	    emit_note (NOTE_INSN_DELETED);
13970	  insn = emit_move_insn (tmp_reg, todec);
13971	  try_split (PATTERN (insn), insn, 0);
13972	  todec = tmp_reg;
13973	}
13974
13975      insn = emit_insn (TARGET_32BIT
13976			? gen_movsi_update (stack_reg, stack_reg,
13977					    todec, stack_reg)
13978			: gen_movdi_di_update (stack_reg, stack_reg,
13979					    todec, stack_reg));
13980    }
13981  else
13982    {
13983      insn = emit_insn (TARGET_32BIT
13984			? gen_addsi3 (stack_reg, stack_reg, todec)
13985			: gen_adddi3 (stack_reg, stack_reg, todec));
13986      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13987		      gen_rtx_REG (Pmode, 12));
13988    }
13989
13990  RTX_FRAME_RELATED_P (insn) = 1;
13991  REG_NOTES (insn) =
13992    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13993		       gen_rtx_SET (VOIDmode, stack_reg,
13994				    gen_rtx_PLUS (Pmode, stack_reg,
13995						  GEN_INT (-size))),
13996		       REG_NOTES (insn));
13997}
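
/* For illustration: on a 32-bit target the TARGET_UPDATE path above
   usually becomes a single

	stwu r1,-SIZE(r1)

   (stdu on 64-bit); frames larger than 32767 bytes first load -SIZE
   into r0 and use the indexed update form instead.  Without
   TARGET_UPDATE, the add and the back-chain store are two separate
   instructions.  A sketch; the exact insns come from the movsi_update
   and movdi_di_update patterns.  */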
13998
13999/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14000   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14001   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
14002   deduce these equivalences by itself so it wasn't necessary to hold
14003   its hand so much.  */
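
/* For example (a sketch): when the prologue saves a register through
   r12 because r12 holds a copy of the incoming stack pointer, a store

     (set (mem (plus (reg 12) (const_int OFF))) (reg N))

   gets a REG_FRAME_RELATED_EXPR note reading

     (set (mem (plus (reg 1) (const_int VAL+OFF))) (reg N))

   once simplify_rtx below folds the two constants together.  */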
14004
14005static void
14006rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
14007		      rtx reg2, rtx rreg)
14008{
14009  rtx real, temp;
14010
14011  /* copy_rtx will not make unique copies of registers, so we need to
14012     ensure we don't have unwanted sharing here.  */
14013  if (reg == reg2)
14014    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14015
14016  if (reg == rreg)
14017    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14018
14019  real = copy_rtx (PATTERN (insn));
14020
14021  if (reg2 != NULL_RTX)
14022    real = replace_rtx (real, reg2, rreg);
14023
14024  real = replace_rtx (real, reg,
14025		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14026							STACK_POINTER_REGNUM),
14027				    GEN_INT (val)));
14028
14029  /* We expect that 'real' is either a SET or a PARALLEL containing
14030     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
14031     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
14032
14033  if (GET_CODE (real) == SET)
14034    {
14035      rtx set = real;
14036
14037      temp = simplify_rtx (SET_SRC (set));
14038      if (temp)
14039	SET_SRC (set) = temp;
14040      temp = simplify_rtx (SET_DEST (set));
14041      if (temp)
14042	SET_DEST (set) = temp;
14043      if (GET_CODE (SET_DEST (set)) == MEM)
14044	{
14045	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14046	  if (temp)
14047	    XEXP (SET_DEST (set), 0) = temp;
14048	}
14049    }
14050  else
14051    {
14052      int i;
14053
14054      gcc_assert (GET_CODE (real) == PARALLEL);
14055      for (i = 0; i < XVECLEN (real, 0); i++)
14056	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14057	  {
14058	    rtx set = XVECEXP (real, 0, i);
14059
14060	    temp = simplify_rtx (SET_SRC (set));
14061	    if (temp)
14062	      SET_SRC (set) = temp;
14063	    temp = simplify_rtx (SET_DEST (set));
14064	    if (temp)
14065	      SET_DEST (set) = temp;
14066	    if (GET_CODE (SET_DEST (set)) == MEM)
14067	      {
14068		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14069		if (temp)
14070		  XEXP (SET_DEST (set), 0) = temp;
14071	      }
14072	    RTX_FRAME_RELATED_P (set) = 1;
14073	  }
14074    }
14075
14076  if (TARGET_SPE)
14077    real = spe_synthesize_frame_save (real);
14078
14079  RTX_FRAME_RELATED_P (insn) = 1;
14080  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14081					real,
14082					REG_NOTES (insn));
14083}
14084
14085/* Given an SPE frame note, return a PARALLEL of SETs with the
14086   original note, plus a synthetic register save.  */
14087
14088static rtx
14089spe_synthesize_frame_save (rtx real)
14090{
14091  rtx synth, offset, reg, real2;
14092
14093  if (GET_CODE (real) != SET
14094      || GET_MODE (SET_SRC (real)) != V2SImode)
14095    return real;
14096
14097  /* For the SPE, registers saved in 64 bits get a PARALLEL for their
14098     frame-related note.  The parallel contains a set of the register
14099     being saved, and another set to a synthetic register (n+1200).
14100     This is so we can differentiate between 64-bit and 32-bit saves.
14101     Words cannot describe this nastiness.  */
14102
14103  gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14104	      && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14105	      && GET_CODE (SET_SRC (real)) == REG);
14106
14107  /* Transform:
14108       (set (mem (plus (reg x) (const y)))
14109            (reg z))
14110     into:
14111       (set (mem (plus (reg x) (const y+4)))
14112            (reg z+1200))
14113  */
14114
14115  real2 = copy_rtx (real);
14116  PUT_MODE (SET_DEST (real2), SImode);
14117  reg = SET_SRC (real2);
14118  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14119  synth = copy_rtx (real2);
14120
14121  if (BYTES_BIG_ENDIAN)
14122    {
14123      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14124      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14125    }
14126
14127  reg = SET_SRC (synth);
14128
14129  synth = replace_rtx (synth, reg,
14130		       gen_rtx_REG (SImode, REGNO (reg) + 1200));
14131
14132  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14133  synth = replace_rtx (synth, offset,
14134		       GEN_INT (INTVAL (offset)
14135				+ (BYTES_BIG_ENDIAN ? 0 : 4)));
14136
14137  RTX_FRAME_RELATED_P (synth) = 1;
14138  RTX_FRAME_RELATED_P (real2) = 1;
14139  if (BYTES_BIG_ENDIAN)
14140    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14141  else
14142    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14143
14144  return real;
14145}
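
/* Note: REGNO + 1200 above does not name a real hard register; it is a
   private tagging convention for the synthetic half of the save,
   presumably matched up again by the DWARF frame machinery that
   describes 64-bit SPE register saves (an assumption -- treat 1200 as
   a magic constant, not as a register count).  */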
14146
14147/* Returns an insn that has a vrsave set operation with the
14148   appropriate CLOBBERs.  */
14149
14150static rtx
14151generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
14152{
14153  int nclobs, i;
14154  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
14155  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14156
14157  clobs[0]
14158    = gen_rtx_SET (VOIDmode,
14159		   vrsave,
14160		   gen_rtx_UNSPEC_VOLATILE (SImode,
14161					    gen_rtvec (2, reg, vrsave),
14162					    UNSPECV_SET_VRSAVE));
14163
14164  nclobs = 1;
14165
14166  /* We need to clobber the registers in the mask so the scheduler
14167     does not move sets to VRSAVE before sets of AltiVec registers.
14168
14169     However, if the function receives nonlocal gotos, reload will set
14170     all call saved registers live.  We will end up with:
14171
14172     	(set (reg 999) (mem))
14173	(parallel [ (set (reg vrsave) (unspec blah))
14174		    (clobber (reg 999))])
14175
14176     The clobber will cause the store into reg 999 to be dead, and
14177     flow will attempt to delete an epilogue insn.  In this case, we
14178     need an unspec use/set of the register.  */
14179
14180  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
14181    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14182      {
14183	if (!epiloguep || call_used_regs [i])
14184	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14185					     gen_rtx_REG (V4SImode, i));
14186	else
14187	  {
14188	    rtx reg = gen_rtx_REG (V4SImode, i);
14189
14190	    clobs[nclobs++]
14191	      = gen_rtx_SET (VOIDmode,
14192			     reg,
14193			     gen_rtx_UNSPEC (V4SImode,
14194					     gen_rtvec (1, reg), 27));
14195	  }
14196      }
14197
14198  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14199
14200  for (i = 0; i < nclobs; ++i)
14201    XVECEXP (insn, 0, i) = clobs[i];
14202
14203  return insn;
14204}
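
/* Usage note: the prologue calls this with EPILOGUEP == 0 after OR-ing
   the function's vrsave_mask into the saved VRSAVE value, and the
   epilogue calls it with EPILOGUEP == 1 after reloading the saved
   value.  In the epilogue, call-saved registers get an UNSPEC set
   rather than a CLOBBER so that their restores are not deleted as
   dead stores.  */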
14205
14206/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14207   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */
14208
14209static void
14210emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
14211		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
14212{
14213  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14214  rtx replacea, replaceb;
14215
14216  int_rtx = GEN_INT (offset);
14217
14218  /* Some cases that need register indexed addressing.  */
14219  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
14220      || (TARGET_E500_DOUBLE && mode == DFmode)
14221      || (TARGET_SPE_ABI
14222	  && SPE_VECTOR_MODE (mode)
14223	  && !SPE_CONST_OFFSET_OK (offset)))
14224    {
14225      /* Whoever calls us must make sure r11 is available in the
14226	 flow path of instructions in the prologue.  */
14227      offset_rtx = gen_rtx_REG (Pmode, 11);
14228      emit_move_insn (offset_rtx, int_rtx);
14229
14230      replacea = offset_rtx;
14231      replaceb = int_rtx;
14232    }
14233  else
14234    {
14235      offset_rtx = int_rtx;
14236      replacea = NULL_RTX;
14237      replaceb = NULL_RTX;
14238    }
14239
14240  reg = gen_rtx_REG (mode, regno);
14241  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
14242  mem = gen_frame_mem (mode, addr);
14243
14244  insn = emit_move_insn (mem, reg);
14245
14246  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14247}
14248
14249/* Emit an offset memory reference suitable for a frame store, while
14250   converting to a valid addressing mode.  */
14251
14252static rtx
14253gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
14254{
14255  rtx int_rtx, offset_rtx;
14256
14257  int_rtx = GEN_INT (offset);
14258
14259  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14260      || (TARGET_E500_DOUBLE && mode == DFmode))
14261    {
14262      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14263      emit_move_insn (offset_rtx, int_rtx);
14264    }
14265  else
14266    offset_rtx = int_rtx;
14267
14268  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
14269}
14270
14271/* Look for user-defined global regs.  We should not save and restore these,
14272   and cannot use stmw/lmw if there are any in the range they cover.  */
14273
14274static bool
14275no_global_regs_above (int first_greg)
14276{
14277  int i;
14278  for (i = 0; i < 32 - first_greg; i++)
14279    if (global_regs[first_greg + i])
14280      return false;
14281  return true;
14282}
14283
14284#ifndef TARGET_FIX_AND_CONTINUE
14285#define TARGET_FIX_AND_CONTINUE 0
14286#endif
14287
14288/* Emit function prologue as insns.  */
14289
14290void
14291rs6000_emit_prologue (void)
14292{
14293  rs6000_stack_t *info = rs6000_stack_info ();
14294  enum machine_mode reg_mode = Pmode;
14295  int reg_size = TARGET_32BIT ? 4 : 8;
14296  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14297  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14298  rtx frame_reg_rtx = sp_reg_rtx;
14299  rtx cr_save_rtx = NULL_RTX;
14300  rtx insn;
14301  int saving_FPRs_inline;
14302  int using_store_multiple;
14303  HOST_WIDE_INT sp_offset = 0;
14304
14305  if (TARGET_FIX_AND_CONTINUE)
14306    {
14307      /* GDB on Darwin arranges to forward a function from the old
14308	 address by modifying the first 5 instructions of the function
14309	 to branch to the overriding function.  This is necessary to
14310	 permit function pointers that point to the old function to
14311	 actually forward to the new function.  */
14312      emit_insn (gen_nop ());
14313      emit_insn (gen_nop ());
14314      emit_insn (gen_nop ());
14315      emit_insn (gen_nop ());
14316      emit_insn (gen_nop ());
14317    }
14318
14319  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14320    {
14321      reg_mode = V2SImode;
14322      reg_size = 8;
14323    }
14324
14325  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14326			  && (!TARGET_SPE_ABI
14327			      || info->spe_64bit_regs_used == 0)
14328			  && info->first_gp_reg_save < 31
14329			  && no_global_regs_above (info->first_gp_reg_save));
14330  saving_FPRs_inline = (info->first_fp_reg_save == 64
14331			|| FP_SAVE_INLINE (info->first_fp_reg_save)
14332			|| current_function_calls_eh_return
14333			|| cfun->machine->ra_need_lr);
14334
14335  /* For V.4, update stack before we do any saving and set back pointer.  */
14336  if (! WORLD_SAVE_P (info)
14337      && info->push_p
14338      && (DEFAULT_ABI == ABI_V4
14339	  || current_function_calls_eh_return))
14340    {
14341      if (info->total_size < 32767)
14342	sp_offset = info->total_size;
14343      else
14344	frame_reg_rtx = frame_ptr_rtx;
14345      rs6000_emit_allocate_stack (info->total_size,
14346				  (frame_reg_rtx != sp_reg_rtx
14347				   && (info->cr_save_p
14348				       || info->lr_save_p
14349				       || info->first_fp_reg_save < 64
14350				       || info->first_gp_reg_save < 32
14351				       )));
14352      if (frame_reg_rtx != sp_reg_rtx)
14353	rs6000_emit_stack_tie ();
14354    }
14355
14356  /* Handle world saves specially here.  */
14357  if (WORLD_SAVE_P (info))
14358    {
14359      int i, j, sz;
14360      rtx treg;
14361      rtvec p;
14362      rtx reg0;
14363
14364      /* save_world expects LR in r0.  */
14365      reg0 = gen_rtx_REG (Pmode, 0);
14366      if (info->lr_save_p)
14367	{
14368	  insn = emit_move_insn (reg0,
14369				 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14370	  RTX_FRAME_RELATED_P (insn) = 1;
14371	}
14372
14373      /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
14374	 assumptions about the offsets of various bits of the stack
14375	 frame.  */
14376      gcc_assert (info->gp_save_offset == -220
14377		  && info->fp_save_offset == -144
14378		  && info->lr_save_offset == 8
14379		  && info->cr_save_offset == 4
14380		  && info->push_p
14381		  && info->lr_save_p
14382		  && (!current_function_calls_eh_return
14383		       || info->ehrd_offset == -432)
14384		  && info->vrsave_save_offset == -224
14385		  && info->altivec_save_offset == -416);
14386
14387      treg = gen_rtx_REG (SImode, 11);
14388      emit_move_insn (treg, GEN_INT (-info->total_size));
14389
14390      /* SAVE_WORLD takes the caller's LR in R0 and the frame size
14391	 in R11.  It also clobbers R12, so beware!  */
14392
14393      /* Preserve CR2 for save_world prologues.  */
14394      sz = 5;
14395      sz += 32 - info->first_gp_reg_save;
14396      sz += 64 - info->first_fp_reg_save;
14397      sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14398      p = rtvec_alloc (sz);
14399      j = 0;
14400      RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
14401					    gen_rtx_REG (Pmode,
14402							 LINK_REGISTER_REGNUM));
14403      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14404					gen_rtx_SYMBOL_REF (Pmode,
14405							    "*save_world"));
14406      /* We do floats first so that the instruction pattern matches
14407	 properly.  */
14408      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14409	{
14410	  rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14411	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14412				   GEN_INT (info->fp_save_offset
14413					    + sp_offset + 8 * i));
14414	  rtx mem = gen_frame_mem (DFmode, addr);
14415
14416	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14417	}
14418      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
14419	{
14420	  rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14421	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14422				   GEN_INT (info->altivec_save_offset
14423					    + sp_offset + 16 * i));
14424	  rtx mem = gen_frame_mem (V4SImode, addr);
14425
14426	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14427	}
14428      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14429	{
14430	  rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14431	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14432				   GEN_INT (info->gp_save_offset
14433					    + sp_offset + reg_size * i));
14434	  rtx mem = gen_frame_mem (reg_mode, addr);
14435
14436	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14437	}
14438
14439      {
14440	/* CR register traditionally saved as CR2.  */
14441	rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14442	rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14443				 GEN_INT (info->cr_save_offset
14444					  + sp_offset));
14445	rtx mem = gen_frame_mem (reg_mode, addr);
14446
14447	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14448      }
14449      /* Explain the use of R0.  */
14450      if (info->lr_save_p)
14451	{
14452	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14453				   GEN_INT (info->lr_save_offset
14454					    + sp_offset));
14455	  rtx mem = gen_frame_mem (reg_mode, addr);
14456
14457	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
14458	}
14459      /* Explain what happens to the stack pointer.  */
14460      {
14461	rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
14462	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
14463      }
14464
14465      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14466      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14467			    treg, GEN_INT (-info->total_size));
14468      sp_offset = info->total_size;
14469    }
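
  /* save_world (and the matching *rest_world used in the epilogue) are
     out-of-line Darwin libgcc routines that save or restore the entire
     register set in one call, which is why the PARALLEL above has to
     describe every store for the unwinder's benefit.  */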
14470
14471  /* Save AltiVec registers if needed.  */
14472  if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14473    {
14474      int i;
14475
14476      /* There should be a non-inline version of this, for when we
14477	 are saving lots of vector registers.  */
14478      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14479	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14480	  {
14481	    rtx areg, savereg, mem;
14482	    int offset;
14483
14484	    offset = info->altivec_save_offset + sp_offset
14485	      + 16 * (i - info->first_altivec_reg_save);
14486
14487	    savereg = gen_rtx_REG (V4SImode, i);
14488
14489	    areg = gen_rtx_REG (Pmode, 0);
14490	    emit_move_insn (areg, GEN_INT (offset));
14491
14492	    /* AltiVec addressing mode is [reg+reg].  */
14493	    mem = gen_frame_mem (V4SImode,
14494				 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
14495
14496	    insn = emit_move_insn (mem, savereg);
14497
14498	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14499				  areg, GEN_INT (offset));
14500	  }
14501    }
14502
14503  /* VRSAVE is a bit vector representing which AltiVec registers
14504     are used.  The OS uses this to determine which vector
14505     registers to save on a context switch.  We need to save
14506     VRSAVE on the stack frame, add whatever AltiVec registers we
14507     used in this function, and do the corresponding magic in the
14508     epilogue.  */
14509
14510  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
14511      && info->vrsave_mask != 0)
14512    {
14513      rtx reg, mem, vrsave;
14514      int offset;
14515
14516      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
14517	 as frame_reg_rtx and r11 as the static chain pointer for
14518	 nested functions.  */
14519      reg = gen_rtx_REG (SImode, 0);
14520      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
14521      if (TARGET_MACHO)
14522	emit_insn (gen_get_vrsave_internal (reg));
14523      else
14524	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
14525
14526      if (!WORLD_SAVE_P (info))
14527	{
14528          /* Save VRSAVE.  */
14529          offset = info->vrsave_save_offset + sp_offset;
14530          mem = gen_frame_mem (SImode,
14531			       gen_rtx_PLUS (Pmode, frame_reg_rtx,
14532					     GEN_INT (offset)));
14533          insn = emit_move_insn (mem, reg);
14534	}
14535
14536      /* Include the registers in the mask.  */
14537      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14538
14539      insn = emit_insn (generate_set_vrsave (reg, info, 0));
14540    }
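
  /* For illustration, the VRSAVE handling above is roughly

	mfspr r0,256		(mfvrsave; VRSAVE is SPR 256; Darwin
				 uses a special get_vrsave pattern)
	stw r0,OFF(r1)		(skipped for world saves)
	oris/ori r0,r0,MASK	(merge in our vector registers)
	mtspr 256,r0

     -- a sketch; the exact OR sequence depends on the mask value.  */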
14541
14542  /* If we use the link register, get it into r0.  */
14543  if (!WORLD_SAVE_P (info) && info->lr_save_p)
14544    {
14545      insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14546			     gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14547      RTX_FRAME_RELATED_P (insn) = 1;
14548    }
14549
14550  /* If we need to save CR, put it into r12.  */
14551  if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
14552    {
14553      rtx set;
14554
14555      cr_save_rtx = gen_rtx_REG (SImode, 12);
14556      insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14557      RTX_FRAME_RELATED_P (insn) = 1;
14558      /* Now, there's no way that dwarf2out_frame_debug_expr is going
14559	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14560	 But that's OK.  All we have to do is specify that _one_ condition
14561	 code register is saved in this stack slot.  The thrower's epilogue
14562	 will then restore all the call-saved registers.
14563	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
14564      set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14565			 gen_rtx_REG (SImode, CR2_REGNO));
14566      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14567					    set,
14568					    REG_NOTES (insn));
14569    }
14570
14571  /* Do any required saving of FPRs.  If there are only one or two to
14572     save, do it ourselves.  Otherwise, call a function.  */
14573  if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
14574    {
14575      int i;
14576      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14577	if ((regs_ever_live[info->first_fp_reg_save+i]
14578	     && ! call_used_regs[info->first_fp_reg_save+i]))
14579	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14580			   info->first_fp_reg_save + i,
14581			   info->fp_save_offset + sp_offset + 8 * i,
14582			   info->total_size);
14583    }
14584  else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
14585    {
14586      int i;
14587      char rname[30];
14588      const char *alloc_rname;
14589      rtvec p;
14590      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
14591
14592      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14593					  gen_rtx_REG (Pmode,
14594						       LINK_REGISTER_REGNUM));
14595      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14596	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
14597      alloc_rname = ggc_strdup (rname);
14598      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14599				      gen_rtx_SYMBOL_REF (Pmode,
14600							  alloc_rname));
14601      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14602	{
14603	  rtx addr, reg, mem;
14604	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14605	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14606			       GEN_INT (info->fp_save_offset
14607					+ sp_offset + 8*i));
14608	  mem = gen_frame_mem (DFmode, addr);
14609
14610	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14611	}
14612      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14613      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14614			    NULL_RTX, NULL_RTX);
14615    }
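
  /* The USE of the symbol built from SAVE_FP_PREFIX/SAVE_FP_SUFFIX
     above is what routes the saves through an out-of-line routine: the
     number in the name is the first FPR saved (first_fp_reg_save - 32),
     and the routine is expected to store that register through f31.
     The exact symbol spelling is target-dependent.  */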
14616
14617  /* Save GPRs.  This is done as a PARALLEL if we are using
14618     the store-multiple instructions.  */
14619  if (!WORLD_SAVE_P (info) && using_store_multiple)
14620    {
14621      rtvec p;
14622      int i;
14623      p = rtvec_alloc (32 - info->first_gp_reg_save);
14624      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14625	{
14626	  rtx addr, reg, mem;
14627	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14628	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14629			       GEN_INT (info->gp_save_offset
14630					+ sp_offset
14631					+ reg_size * i));
14632	  mem = gen_frame_mem (reg_mode, addr);
14633
14634	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14635	}
14636      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14637      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14638			    NULL_RTX, NULL_RTX);
14639    }
14640  else if (!WORLD_SAVE_P (info))
14641    {
14642      int i;
14643      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14644	if ((regs_ever_live[info->first_gp_reg_save + i]
14645	     && (!call_used_regs[info->first_gp_reg_save + i]
14646		 || (i + info->first_gp_reg_save
14647		     == RS6000_PIC_OFFSET_TABLE_REGNUM
14648		     && TARGET_TOC && TARGET_MINIMAL_TOC)))
14649	    || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14650		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14651		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
14652	  {
14653	    rtx addr, reg, mem;
14654	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14655
14656	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14657	      {
14658		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14659		rtx b;
14660
14661		if (!SPE_CONST_OFFSET_OK (offset))
14662		  {
14663		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14664		    emit_move_insn (b, GEN_INT (offset));
14665		  }
14666		else
14667		  b = GEN_INT (offset);
14668
14669		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14670		mem = gen_frame_mem (V2SImode, addr);
14671		insn = emit_move_insn (mem, reg);
14672
14673		if (GET_CODE (b) == CONST_INT)
14674		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14675					NULL_RTX, NULL_RTX);
14676		else
14677		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14678					b, GEN_INT (offset));
14679	      }
14680	    else
14681	      {
14682		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14683				     GEN_INT (info->gp_save_offset
14684					      + sp_offset
14685					      + reg_size * i));
14686		mem = gen_frame_mem (reg_mode, addr);
14687
14688		insn = emit_move_insn (mem, reg);
14689		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14690				      NULL_RTX, NULL_RTX);
14691	      }
14692	  }
14693    }
14694
14695  /* ??? There's no need to emit actual instructions here, but it's the
14696     easiest way to get the frame unwind information emitted.  */
14697  if (current_function_calls_eh_return)
14698    {
14699      unsigned int i, regno;
14700
14701      /* Under the AIX ABI we need to pretend we save r2 here.  */
14702      if (TARGET_AIX)
14703	{
14704	  rtx addr, reg, mem;
14705
14706	  reg = gen_rtx_REG (reg_mode, 2);
14707	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14708			       GEN_INT (sp_offset + 5 * reg_size));
14709	  mem = gen_frame_mem (reg_mode, addr);
14710
14711	  insn = emit_move_insn (mem, reg);
14712	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14713				NULL_RTX, NULL_RTX);
14714	  PATTERN (insn) = gen_blockage ();
14715	}
14716
14717      for (i = 0; ; ++i)
14718	{
14719	  regno = EH_RETURN_DATA_REGNO (i);
14720	  if (regno == INVALID_REGNUM)
14721	    break;
14722
14723	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14724			   info->ehrd_offset + sp_offset
14725			   + reg_size * (int) i,
14726			   info->total_size);
14727	}
14728    }
14729
14730  /* Save lr if we used it.  */
14731  if (!WORLD_SAVE_P (info) && info->lr_save_p)
14732    {
14733      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14734			       GEN_INT (info->lr_save_offset + sp_offset));
14735      rtx reg = gen_rtx_REG (Pmode, 0);
14736      rtx mem = gen_rtx_MEM (Pmode, addr);
14737      /* This MEM should not be in the frame alias set, because of
14738	 __builtin_return_address.  */
14739
14740      insn = emit_move_insn (mem, reg);
14741      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14742			    NULL_RTX, NULL_RTX);
14743    }
14744
14745  /* Save CR if we use any that must be preserved.  */
14746  if (!WORLD_SAVE_P (info) && info->cr_save_p)
14747    {
14748      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14749			       GEN_INT (info->cr_save_offset + sp_offset));
14750      rtx mem = gen_frame_mem (SImode, addr);
14751      /* See the large comment above about why CR2_REGNO is used.  */
14752      rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
14753
14754      /* If r12 was used to hold the original sp, copy cr into r0 now
14755	 that it's free.  */
14756      if (REGNO (frame_reg_rtx) == 12)
14757	{
14758	  rtx set;
14759
14760	  cr_save_rtx = gen_rtx_REG (SImode, 0);
14761	  insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14762	  RTX_FRAME_RELATED_P (insn) = 1;
14763	  set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14764	  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14765						set,
14766						REG_NOTES (insn));
14767
14768	}
14769      insn = emit_move_insn (mem, cr_save_rtx);
14770
14771      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14772			    NULL_RTX, NULL_RTX);
14773    }
14774
14775  /* Update stack and set back pointer unless this is V.4,
14776     for which it was done previously.  */
14777  if (!WORLD_SAVE_P (info) && info->push_p
14778      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
14779    rs6000_emit_allocate_stack (info->total_size, FALSE);
14780
14781  /* Set frame pointer, if needed.  */
14782  if (frame_pointer_needed)
14783    {
14784      insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
14785			     sp_reg_rtx);
14786      RTX_FRAME_RELATED_P (insn) = 1;
14787    }
14788
14789  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
14790  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
14791      || (DEFAULT_ABI == ABI_V4
14792	  && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
14793	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
14794    {
14795      /* If emit_load_toc_table will use the link register, we need to save
14796	 it.  We use R12 for this purpose because emit_load_toc_table
14797	 can use register 0.  This allows us to use a plain 'blr' to return
14798	 from the procedure more often.  */
14799      int save_LR_around_toc_setup = (TARGET_ELF
14800				      && DEFAULT_ABI != ABI_AIX
14801				      && flag_pic
14802				      && ! info->lr_save_p
14803				      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14804      if (save_LR_around_toc_setup)
14805	{
14806	  rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14807
14808	  insn = emit_move_insn (frame_ptr_rtx, lr);
14809	  rs6000_maybe_dead (insn);
14810	  RTX_FRAME_RELATED_P (insn) = 1;
14811
14812	  rs6000_emit_load_toc_table (TRUE);
14813
14814	  insn = emit_move_insn (lr, frame_ptr_rtx);
14815	  rs6000_maybe_dead (insn);
14816	  RTX_FRAME_RELATED_P (insn) = 1;
14817	}
14818      else
14819	rs6000_emit_load_toc_table (TRUE);
14820    }
14821
14822#if TARGET_MACHO
14823  if (DEFAULT_ABI == ABI_DARWIN
14824      && flag_pic && current_function_uses_pic_offset_table)
14825    {
14826      rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
14827      rtx src = machopic_function_base_sym ();
14828
14829      /* Save and restore LR locally around this call (in R0).  */
14830      if (!info->lr_save_p)
14831	rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14832
14833      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
14834
14835      insn = emit_move_insn (gen_rtx_REG (Pmode,
14836					  RS6000_PIC_OFFSET_TABLE_REGNUM),
14837			     lr);
14838      rs6000_maybe_dead (insn);
14839
14840      if (!info->lr_save_p)
14841	rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
14842    }
14843#endif
14844}
14845
14846/* Write function prologue.  */
14847
14848static void
14849rs6000_output_function_prologue (FILE *file,
14850				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
14851{
14852  rs6000_stack_t *info = rs6000_stack_info ();
14853
14854  if (TARGET_DEBUG_STACK)
14855    debug_stack_info (info);
14856
14857  /* Write .extern for any function we will call to save and restore
14858     fp values.  */
14859  if (info->first_fp_reg_save < 64
14860      && !FP_SAVE_INLINE (info->first_fp_reg_save))
14861    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14862	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
14863	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14864	     RESTORE_FP_SUFFIX);
14865
14866  /* Write .extern for AIX common mode routines, if needed.  */
14867  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14868    {
14869      fputs ("\t.extern __mulh\n", file);
14870      fputs ("\t.extern __mull\n", file);
14871      fputs ("\t.extern __divss\n", file);
14872      fputs ("\t.extern __divus\n", file);
14873      fputs ("\t.extern __quoss\n", file);
14874      fputs ("\t.extern __quous\n", file);
14875      common_mode_defined = 1;
14876    }
14877
14878  if (! HAVE_prologue)
14879    {
14880      start_sequence ();
14881
14882      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14883	 the "toplevel" insn chain.  */
14884      emit_note (NOTE_INSN_DELETED);
14885      rs6000_emit_prologue ();
14886      emit_note (NOTE_INSN_DELETED);
14887
14888      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
14889      {
14890	rtx insn;
14891	unsigned addr = 0;
14892	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14893	  {
14894	    INSN_ADDRESSES_NEW (insn, addr);
14895	    addr += 4;
14896	  }
14897      }
14898
14899      if (TARGET_DEBUG_STACK)
14900	debug_rtx_list (get_insns (), 100);
14901      final (get_insns (), file, FALSE);
14902      end_sequence ();
14903    }
14904
14905  rs6000_pic_labelno++;
14906}
14907
14908/* Emit function epilogue as insns.
14909
14910   At present, dwarf2out_frame_debug_expr doesn't understand
14911   register restores, so we don't bother setting RTX_FRAME_RELATED_P
14912   anywhere in the epilogue.  Most of the insns below would in any case
14913   need special notes to explain where r11 is in relation to the stack.  */
14914
14915void
14916rs6000_emit_epilogue (int sibcall)
14917{
14918  rs6000_stack_t *info;
14919  int restoring_FPRs_inline;
14920  int using_load_multiple;
14921  int using_mfcr_multiple;
14922  int use_backchain_to_restore_sp;
14923  int sp_offset = 0;
14924  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14925  rtx frame_reg_rtx = sp_reg_rtx;
14926  enum machine_mode reg_mode = Pmode;
14927  int reg_size = TARGET_32BIT ? 4 : 8;
14928  int i;
14929
14930  info = rs6000_stack_info ();
14931
14932  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14933    {
14934      reg_mode = V2SImode;
14935      reg_size = 8;
14936    }
14937
14938  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
14939			 && (!TARGET_SPE_ABI
14940			     || info->spe_64bit_regs_used == 0)
14941			 && info->first_gp_reg_save < 31
14942			 && no_global_regs_above (info->first_gp_reg_save));
14943  restoring_FPRs_inline = (sibcall
14944			   || current_function_calls_eh_return
14945			   || info->first_fp_reg_save == 64
14946			   || FP_SAVE_INLINE (info->first_fp_reg_save));
14947  use_backchain_to_restore_sp = (frame_pointer_needed
14948				 || current_function_calls_alloca
14949				 || info->total_size > 32767);
14950  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14951			 || rs6000_cpu == PROCESSOR_PPC603
14952			 || rs6000_cpu == PROCESSOR_PPC750
14953			 || optimize_size);
14954
14955  if (WORLD_SAVE_P (info))
14956    {
14957      int i, j;
14958      char rname[30];
14959      const char *alloc_rname;
14960      rtvec p;
14961
14962      /* eh_rest_world_r10 will return to the location saved in the LR
14963	 stack slot (which is not likely to be our caller).
14964	 Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
14965	 rest_world is similar, except any R10 parameter is ignored.
14966	 The exception-handling stuff that was here in 2.95 is no
14967	 longer necessary.  */
14968
14969      p = rtvec_alloc (9
14970		       + 1
14971		       + 32 - info->first_gp_reg_save
14972		       + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14973		       + 63 + 1 - info->first_fp_reg_save);
14974
14975      strcpy (rname, ((current_function_calls_eh_return) ?
14976		      "*eh_rest_world_r10" : "*rest_world"));
14977      alloc_rname = ggc_strdup (rname);
14978
14979      j = 0;
14980      RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14981      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
14982					gen_rtx_REG (Pmode,
14983						     LINK_REGISTER_REGNUM));
14984      RTVEC_ELT (p, j++)
14985	= gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
14986      /* The instruction pattern requires a clobber here;
14987	 it is shared with the restVEC helper.  */
14988      RTVEC_ELT (p, j++)
14989	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
14990
14991      {
14992	/* CR register traditionally saved as CR2.  */
14993	rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14994	rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14995				 GEN_INT (info->cr_save_offset));
14996	rtx mem = gen_frame_mem (reg_mode, addr);
14997
14998	RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14999      }
15000
15001      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15002	{
15003	  rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15004	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15005				   GEN_INT (info->gp_save_offset
15006					    + reg_size * i));
15007	  rtx mem = gen_frame_mem (reg_mode, addr);
15008
15009	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15010	}
15011      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
15012	{
15013	  rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15014	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15015				   GEN_INT (info->altivec_save_offset
15016					    + 16 * i));
15017	  rtx mem = gen_frame_mem (V4SImode, addr);
15018
15019	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15020	}
15021      for (i = 0; info->first_fp_reg_save + i <= 63; i++)
15022	{
15023	  rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15024	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15025				   GEN_INT (info->fp_save_offset
15026					    + 8 * i));
15027	  rtx mem = gen_frame_mem (DFmode, addr);
15028
15029	  RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15030	}
15031      RTVEC_ELT (p, j++)
15032	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
15033      RTVEC_ELT (p, j++)
15034	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
15035      RTVEC_ELT (p, j++)
15036	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
15037      RTVEC_ELT (p, j++)
15038	= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
15039      RTVEC_ELT (p, j++)
15040	= gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
15041      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15042
15043      return;
15044    }
15045
15046  /* If we have a frame pointer, a call to alloca, or a large stack
15047     frame, restore the old stack pointer using the backchain.  Otherwise,
15048     we know what size to update it with.  */
15049  if (use_backchain_to_restore_sp)
15050    {
15051      /* Under V.4, don't reset the stack pointer until after we're done
15052	 loading the saved registers.  */
15053      if (DEFAULT_ABI == ABI_V4)
15054	frame_reg_rtx = gen_rtx_REG (Pmode, 11);
15055
15056      emit_move_insn (frame_reg_rtx,
15057		      gen_rtx_MEM (Pmode, sp_reg_rtx));
15058    }
15059  else if (info->push_p)
15060    {
15061      if (DEFAULT_ABI == ABI_V4
15062	  || current_function_calls_eh_return)
15063	sp_offset = info->total_size;
15064      else
15065	{
15066	  emit_insn (TARGET_32BIT
15067		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15068				   GEN_INT (info->total_size))
15069		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15070				   GEN_INT (info->total_size)));
15071	}
15072    }
15073
15074  /* Restore AltiVec registers if needed.  */
15075  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15076    {
15077      int i;
15078
15079      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15080	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15081	  {
15082	    rtx addr, areg, mem;
15083
15084	    areg = gen_rtx_REG (Pmode, 0);
15085	    emit_move_insn
15086	      (areg, GEN_INT (info->altivec_save_offset
15087			      + sp_offset
15088			      + 16 * (i - info->first_altivec_reg_save)));
15089
15090	    /* AltiVec addressing mode is [reg+reg].  */
15091	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
15092	    mem = gen_frame_mem (V4SImode, addr);
15093
15094	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15095	  }
15096    }
15097
15098  /* Restore VRSAVE if needed.  */
15099  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15100      && info->vrsave_mask != 0)
15101    {
15102      rtx addr, mem, reg;
15103
15104      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15105			   GEN_INT (info->vrsave_save_offset + sp_offset));
15106      mem = gen_frame_mem (SImode, addr);
15107      reg = gen_rtx_REG (SImode, 12);
15108      emit_move_insn (reg, mem);
15109
15110      emit_insn (generate_set_vrsave (reg, info, 1));
15111    }
15112
15113  /* Get the old lr if we saved it.  */
15114  if (info->lr_save_p)
15115    {
15116      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15117				      info->lr_save_offset + sp_offset);
15118
15119      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
15120    }
15121
15122  /* Get the old cr if we saved it.  */
15123  if (info->cr_save_p)
15124    {
15125      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15126			       GEN_INT (info->cr_save_offset + sp_offset));
15127      rtx mem = gen_frame_mem (SImode, addr);
15128
15129      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15130    }
15131
15132  /* Set LR here to try to overlap restores below.  */
15133  if (info->lr_save_p)
15134    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
15135		    gen_rtx_REG (Pmode, 0));
15136
15137  /* Load exception handler data registers, if needed.  */
15138  if (current_function_calls_eh_return)
15139    {
15140      unsigned int i, regno;
15141
15142      if (TARGET_AIX)
15143	{
15144	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15145				   GEN_INT (sp_offset + 5 * reg_size));
15146	  rtx mem = gen_frame_mem (reg_mode, addr);
15147
15148	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15149	}
15150
15151      for (i = 0; ; ++i)
15152	{
15153	  rtx mem;
15154
15155	  regno = EH_RETURN_DATA_REGNO (i);
15156	  if (regno == INVALID_REGNUM)
15157	    break;
15158
15159	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15160				      info->ehrd_offset + sp_offset
15161				      + reg_size * (int) i);
15162
15163	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15164	}
15165    }
15166
15167  /* Restore GPRs.  This is done as a PARALLEL if we are using
15168     the load-multiple instructions.  */
15169  if (using_load_multiple)
15170    {
15171      rtvec p;
15172      p = rtvec_alloc (32 - info->first_gp_reg_save);
15173      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15174	{
15175	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15176				   GEN_INT (info->gp_save_offset
15177					    + sp_offset
15178					    + reg_size * i));
15179	  rtx mem = gen_frame_mem (reg_mode, addr);
15180
15181	  RTVEC_ELT (p, i) =
15182	    gen_rtx_SET (VOIDmode,
15183			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15184			 mem);
15185	}
15186      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15187    }
15188  else
15189    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15190      if ((regs_ever_live[info->first_gp_reg_save + i]
15191	   && (!call_used_regs[info->first_gp_reg_save + i]
15192	       || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
15193		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
15194	  || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
15195	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15196		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
15197	{
15198	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15199				   GEN_INT (info->gp_save_offset
15200					    + sp_offset
15201					    + reg_size * i));
15202	  rtx mem = gen_frame_mem (reg_mode, addr);
15203
15204	  /* Restore 64-bit quantities for SPE.  */
15205	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15206	    {
15207	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
15208	      rtx b;
15209
15210	      if (!SPE_CONST_OFFSET_OK (offset))
15211		{
15212		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15213		  emit_move_insn (b, GEN_INT (offset));
15214		}
15215	      else
15216		b = GEN_INT (offset);
15217
15218	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
15219	      mem = gen_frame_mem (V2SImode, addr);
15220	    }
15221
15222	  emit_move_insn (gen_rtx_REG (reg_mode,
15223				       info->first_gp_reg_save + i), mem);
15224	}
15225
15226  /* Restore FPRs if we need to do it without calling a function.  */
15227  if (restoring_FPRs_inline)
15228    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15229      if ((regs_ever_live[info->first_fp_reg_save+i]
15230	   && ! call_used_regs[info->first_fp_reg_save+i]))
15231	{
15232	  rtx addr, mem;
15233	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15234			       GEN_INT (info->fp_save_offset
15235					+ sp_offset
15236					+ 8 * i));
15237	  mem = gen_frame_mem (DFmode, addr);
15238
15239	  emit_move_insn (gen_rtx_REG (DFmode,
15240				       info->first_fp_reg_save + i),
15241			  mem);
15242	}
15243
15244  /* If we saved the CR, restore just those fields that were used.  */
15245  if (info->cr_save_p)
15246    {
15247      rtx r12_rtx = gen_rtx_REG (SImode, 12);
15248      int count = 0;
15249
15250      if (using_mfcr_multiple)
15251	{
15252	  for (i = 0; i < 8; i++)
15253	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15254	      count++;
15255	  gcc_assert (count);
15256	}
15257
15258      if (using_mfcr_multiple && count > 1)
15259	{
15260	  rtvec p;
15261	  int ndx;
15262
15263	  p = rtvec_alloc (count);
15264
15265	  ndx = 0;
15266	  for (i = 0; i < 8; i++)
15267	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15268	      {
15269		rtvec r = rtvec_alloc (2);
15270		RTVEC_ELT (r, 0) = r12_rtx;
15271		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
15272		RTVEC_ELT (p, ndx) =
15273		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
15274			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
15275		ndx++;
15276	      }
15277	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15278	  gcc_assert (ndx == count);
15279	}
15280      else
15281	for (i = 0; i < 8; i++)
15282	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15283	    {
15284	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
15285							   CR0_REGNO+i),
15286					      r12_rtx));
15287	    }
15288    }
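
  /* For illustration: the multiple-field case above emits a single
     mtcrf whose 8-bit mask has one bit per live CR field -- e.g.
     "mtcrf 0x38,r12" to restore cr2, cr3 and cr4, given the
     1 << (7-i) encoding above -- while the fallback emits one
     single-field mtcrf per live field.  */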
15289
15290  /* If this is V.4, unwind the stack pointer after all of the loads
15291     have been done.  */
15292  if (frame_reg_rtx != sp_reg_rtx)
15293    {
15294      /* This blockage is needed so that sched doesn't decide to move
15295	 the sp change before the register restores.  */
15296      rs6000_emit_stack_tie ();
15297      emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15298    }
15299  else if (sp_offset != 0)
15300    emit_insn (TARGET_32BIT
15301	       ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15302			     GEN_INT (sp_offset))
15303	       : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15304			     GEN_INT (sp_offset)));
15305
15306  if (current_function_calls_eh_return)
15307    {
15308      rtx sa = EH_RETURN_STACKADJ_RTX;
15309      emit_insn (TARGET_32BIT
15310		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15311		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15312    }
15313
15314  if (!sibcall)
15315    {
15316      rtvec p;
15317      if (! restoring_FPRs_inline)
15318	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15319      else
15320	p = rtvec_alloc (2);
15321
15322      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
15323      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15324				      gen_rtx_REG (Pmode,
15325						   LINK_REGISTER_REGNUM));
15326
15327      /* If we have to restore more than two FP registers, branch to the
15328	 restore function.  It will return to our caller.  */
15329      if (! restoring_FPRs_inline)
15330	{
15331	  int i;
15332	  char rname[30];
15333	  const char *alloc_rname;
15334
15335	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
15336		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
15337	  alloc_rname = ggc_strdup (rname);
15338	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15339					  gen_rtx_SYMBOL_REF (Pmode,
15340							      alloc_rname));
15341
15342	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15343	    {
15344	      rtx addr, mem;
15345	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15346				   GEN_INT (info->fp_save_offset + 8*i));
15347	      mem = gen_frame_mem (DFmode, addr);
15348
15349	      RTVEC_ELT (p, i+3) =
15350		gen_rtx_SET (VOIDmode,
15351			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15352			     mem);
15353	    }
15354	}
15355
15356      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15357    }
15358}
15359
15360/* Write function epilogue.  */
15361
15362static void
15363rs6000_output_function_epilogue (FILE *file,
15364				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
15365{
15366  if (! HAVE_epilogue)
15367    {
15368      rtx insn = get_last_insn ();
15369      /* If the last insn was a BARRIER, we don't have to write anything except
15370	 the trace table.  */
15371      if (GET_CODE (insn) == NOTE)
15372	insn = prev_nonnote_insn (insn);
15373      if (insn == 0 || GET_CODE (insn) != BARRIER)
15374	{
15375	  /* This is slightly ugly, but at least we don't have two
15376	     copies of the epilogue-emitting code.  */
15377	  start_sequence ();
15378
15379	  /* A NOTE_INSN_DELETED is supposed to be at the start
15380	     and end of the "toplevel" insn chain.  */
15381	  emit_note (NOTE_INSN_DELETED);
15382	  rs6000_emit_epilogue (FALSE);
15383	  emit_note (NOTE_INSN_DELETED);
15384
15385	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
15386	  {
15387	    rtx insn;
15388	    unsigned addr = 0;
15389	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15390	      {
15391		INSN_ADDRESSES_NEW (insn, addr);
15392		addr += 4;
15393	      }
15394	  }
15395
15396	  if (TARGET_DEBUG_STACK)
15397	    debug_rtx_list (get_insns (), 100);
15398	  final (get_insns (), file, FALSE);
15399	  end_sequence ();
15400	}
15401    }
15402
15403#if TARGET_MACHO
15404  macho_branch_islands ();
15405  /* Mach-O doesn't support labels at the end of objects, so if
15406     it looks like we might want one, insert a NOP.  */
15407  {
15408    rtx insn = get_last_insn ();
15409    while (insn
15410	   && NOTE_P (insn)
15411	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15412      insn = PREV_INSN (insn);
15413    if (insn
15414	&& (LABEL_P (insn)
15415	    || (NOTE_P (insn)
15416		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15417      fputs ("\tnop\n", file);
15418  }
15419#endif
15420
15421  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
15422     on its format.
15423
15424     We don't output a traceback table if -finhibit-size-directive was
15425     used.  The documentation for -finhibit-size-directive reads
15426     ``don't output a @code{.size} assembler directive, or anything
15427     else that would cause trouble if the function is split in the
15428     middle, and the two halves are placed at locations far apart in
15429     memory.''  The traceback table has this property, since it
15430     includes the offset from the start of the function to the
15431     traceback table itself.
15432
15433     The System V.4 PowerPC ABI (and the embedded ABI derived from
15434     it) uses a different traceback table.  */
15435  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15436      && rs6000_traceback != traceback_none && !current_function_is_thunk)
15437    {
15438      const char *fname = NULL;
15439      const char *language_string = lang_hooks.name;
15440      int fixed_parms = 0, float_parms = 0, parm_info = 0;
15441      int i;
15442      int optional_tbtab;
15443      rs6000_stack_t *info = rs6000_stack_info ();
15444
15445      if (rs6000_traceback == traceback_full)
15446	optional_tbtab = 1;
15447      else if (rs6000_traceback == traceback_part)
15448	optional_tbtab = 0;
15449      else
15450	optional_tbtab = !optimize_size && !TARGET_ELF;
15451
15452      if (optional_tbtab)
15453	{
15454	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15455	  while (*fname == '.')	/* V.4 encodes . in the name */
15456	    fname++;
15457
15458	  /* Need label immediately before tbtab, so we can compute
15459	     its offset from the function start.  */
15460	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15461	  ASM_OUTPUT_LABEL (file, fname);
15462	}
15463
15464      /* The .tbtab pseudo-op can only be used for the first eight
15465	 expressions, since it can't handle the possibly variable
15466	 length fields that follow.  However, if you omit the optional
15467	 fields, the assembler outputs zeros for all optional fields
15468	 anyway, giving each variable length field its minimum length
15469	 (as defined in sys/debug.h).  Thus we cannot use the .tbtab
15470	 pseudo-op at all.  */
15471
15472      /* An all-zero word flags the start of the tbtab, for debuggers
15473	 that have to find it by searching forward from the entry
15474	 point or from the current pc.  */
15475      fputs ("\t.long 0\n", file);
15476
15477      /* Tbtab format type.  Use format type 0.  */
15478      fputs ("\t.byte 0,", file);
15479
15480      /* Language type.  Unfortunately, there does not seem to be any
15481	 official way to discover the language being compiled, so we
15482	 use language_string.
15483	 C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
15484	 Java is 13.  Objective-C is 14.  Objective-C++ isn't assigned
15485	 a number, so for now use 9.  */
15486      if (! strcmp (language_string, "GNU C"))
15487	i = 0;
15488      else if (! strcmp (language_string, "GNU F77")
15489	       || ! strcmp (language_string, "GNU F95"))
15490	i = 1;
15491      else if (! strcmp (language_string, "GNU Pascal"))
15492	i = 2;
15493      else if (! strcmp (language_string, "GNU Ada"))
15494	i = 3;
15495      else if (! strcmp (language_string, "GNU C++")
15496	       || ! strcmp (language_string, "GNU Objective-C++"))
15497	i = 9;
15498      else if (! strcmp (language_string, "GNU Java"))
15499	i = 13;
15500      else if (! strcmp (language_string, "GNU Objective-C"))
15501	i = 14;
15502      else
15503	gcc_unreachable ();
15504      fprintf (file, "%d,", i);
15505
15506      /* 8 single bit fields: global linkage (not set for C extern linkage,
15507	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15508	 from start of procedure stored in tbtab, internal function, function
15509	 has controlled storage, function has no toc, function uses fp,
15510	 function logs/aborts fp operations.  */
15511      /* Assume that fp operations are used if any fp reg must be saved.  */
15512      fprintf (file, "%d,",
15513	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
15514
15515      /* 6 bitfields: function is interrupt handler, name present in
15516	 proc table, function calls alloca, on condition directives
15517	 (controls stack walks, 3 bits), saves condition reg, saves
15518	 link reg.  */
15519      /* The `function calls alloca' bit seems to be set whenever reg 31 is
15520	 set up as a frame pointer, even when there is no alloca call.  */
15521      fprintf (file, "%d,",
15522	       ((optional_tbtab << 6)
15523		| ((optional_tbtab & frame_pointer_needed) << 5)
15524		| (info->cr_save_p << 1)
15525		| (info->lr_save_p)));
15526
15527      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
15528	 (6 bits).  */
15529      fprintf (file, "%d,",
15530	       (info->push_p << 7) | (64 - info->first_fp_reg_save));
15531
15532      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
15533      fprintf (file, "%d,", (32 - first_reg_to_save ()));
15534
15535      if (optional_tbtab)
15536	{
15537	  /* Compute the parameter info from the function decl argument
15538	     list.  */
15539	  tree decl;
15540	  int next_parm_info_bit = 31;
15541
15542	  for (decl = DECL_ARGUMENTS (current_function_decl);
15543	       decl; decl = TREE_CHAIN (decl))
15544	    {
15545	      rtx parameter = DECL_INCOMING_RTL (decl);
15546	      enum machine_mode mode = GET_MODE (parameter);
15547
15548	      if (GET_CODE (parameter) == REG)
15549		{
15550		  if (SCALAR_FLOAT_MODE_P (mode))
15551		    {
15552		      int bits;
15553
15554		      float_parms++;
15555
15556		      switch (mode)
15557			{
15558			case SFmode:
15559			  bits = 0x2;
15560			  break;
15561
15562			case DFmode:
15563			case TFmode:
15564			  bits = 0x3;
15565			  break;
15566
15567			default:
15568			  gcc_unreachable ();
15569			}
15570
15571		      /* If only one bit will fit, don't or in this entry.  */
15572		      if (next_parm_info_bit > 0)
15573			parm_info |= (bits << (next_parm_info_bit - 1));
15574		      next_parm_info_bit -= 2;
15575		    }
15576		  else
15577		    {
15578		      fixed_parms += ((GET_MODE_SIZE (mode)
15579				       + (UNITS_PER_WORD - 1))
15580				      / UNITS_PER_WORD);
15581		      next_parm_info_bit -= 1;
15582		    }
15583		}
15584	    }
15585	}
15586
15587      /* Number of fixed point parameters.  */
15588      /* This is actually the number of words of fixed point parameters; thus
15589	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
15590      fprintf (file, "%d,", fixed_parms);
15591
15592      /* 2 bitfields: number of floating point parameters (7 bits), parameters
15593	 all on stack.  */
15594      /* This is actually the number of fp registers that hold parameters;
15595	 and thus the maximum value is 13.  */
15596      /* Set parameters on stack bit if parameters are not in their original
15597	 registers, regardless of whether they are on the stack?  Xlc
15598	 seems to set the bit when not optimizing.  */
15599      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15600
15601      if (! optional_tbtab)
15602	return;
15603
15604      /* Optional fields follow.  Some are variable length.  */
15605
15606      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15607	 11 double float.  */
15608      /* There is an entry for each parameter in a register, in the order that
15609	 they occur in the parameter list.  Any intervening arguments on the
15610	 stack are ignored.  If the list overflows a long (max possible length
15611	 34 bits) then completely leave off all elements that don't fit.  */
15612      /* Only emit this long if there was at least one parameter.  */
15613      if (fixed_parms || float_parms)
15614	fprintf (file, "\t.long %d\n", parm_info);
15615
15616      /* Offset from start of code to tb table.  */
15617      fputs ("\t.long ", file);
15618      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15619      if (TARGET_AIX)
15620	RS6000_OUTPUT_BASENAME (file, fname);
15621      else
15622	assemble_name (file, fname);
15623      putc ('-', file);
15624      rs6000_output_function_entry (file, fname);
15625      putc ('\n', file);
15626
15627      /* Interrupt handler mask.  */
15628      /* Omit this long, since we never set the interrupt handler bit
15629	 above.  */
15630
15631      /* Number of CTL (controlled storage) anchors.  */
15632      /* Omit this long, since the has_ctl bit is never set above.  */
15633
15634      /* Displacement into stack of each CTL anchor.  */
15635      /* Omit this list of longs, because there are no CTL anchors.  */
15636
15637      /* Length of function name.  */
15638      if (*fname == '*')
15639	++fname;
15640      fprintf (file, "\t.short %d\n", (int) strlen (fname));
15641
15642      /* Function name.  */
15643      assemble_string (fname, strlen (fname));
15644
15645      /* Register for alloca automatic storage; this is always reg 31.
15646	 Only emit this if the alloca bit was set above.  */
15647      if (frame_pointer_needed)
15648	fputs ("\t.byte 31\n", file);
15649
15650      fputs ("\t.align 2\n", file);
15651    }
15652}
15653
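/* Illustrative sketch, not part of the compiler: the parameter-info
   word assembled in the traceback code above packs left-adjusted bit
   fields, one per register parameter: 0 = fixed, 10 = single float,
   11 = double float.  Compiles and runs on its own.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int parm_info = 0;
  int next_parm_info_bit = 31;

  /* An int parameter: one 0 bit.  */
  next_parm_info_bit -= 1;

  /* A double parameter: '11' in the next two bits.  */
  if (next_parm_info_bit > 0)
    parm_info |= 0x3 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  /* A float parameter: '10' in the next two bits.  */
  if (next_parm_info_bit > 0)
    parm_info |= 0x2 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  printf ("parm_info = 0x%08x\n", parm_info);	/* prints 0x70000000 */
  return 0;
}
#endif
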
15654/* A C compound statement that outputs the assembler code for a thunk
15655   function, used to implement C++ virtual function calls with
15656   multiple inheritance.  The thunk acts as a wrapper around a virtual
15657   function, adjusting the implicit object parameter before handing
15658   control off to the real function.
15659
15660   First, emit code to add the integer DELTA to the location that
15661   contains the incoming first argument.  Assume that this argument
15662   contains a pointer, and is the one used to pass the `this' pointer
15663   in C++.  This is the incoming argument *before* the function
15664   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
15665   values of all other incoming arguments.
15666
15667   After the addition, emit code to jump to FUNCTION, which is a
15668   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
15669   not touch the return address.  Hence returning from FUNCTION will
15670   return to whoever called the current `thunk'.
15671
15672   The effect must be as if FUNCTION had been called directly with the
15673   adjusted first argument.  This macro is responsible for emitting
15674   all of the code for a thunk function; output_function_prologue()
15675   and output_function_epilogue() are not invoked.
15676
15677   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
15678   been extracted from it.)  It might possibly be useful on some
15679   targets, but probably not.
15680
15681   If you do not define this macro, the target-independent code in the
15682   C++ frontend will generate a less efficient heavyweight thunk that
15683   calls FUNCTION instead of jumping to it.  The generic approach does
15684   not support varargs.  */
15685
15686static void
15687rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15688			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
15689			tree function)
15690{
15691  rtx this, insn, funexp;
15692
15693  reload_completed = 1;
15694  epilogue_completed = 1;
15695  no_new_pseudos = 1;
15696  reset_block_changes ();
15697
15698  /* Mark the end of the (empty) prologue.  */
15699  emit_note (NOTE_INSN_PROLOGUE_END);
15700
15701  /* Find the "this" pointer.  If the function returns a structure,
15702     the structure return pointer is in r3.  */
15703  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
15704    this = gen_rtx_REG (Pmode, 4);
15705  else
15706    this = gen_rtx_REG (Pmode, 3);
15707
15708  /* Apply the constant offset, if required.  */
15709  if (delta)
15710    {
15711      rtx delta_rtx = GEN_INT (delta);
15712      emit_insn (TARGET_32BIT
15713		 ? gen_addsi3 (this, this, delta_rtx)
15714		 : gen_adddi3 (this, this, delta_rtx));
15715    }
15716
15717  /* Apply the offset from the vtable, if required.  */
15718  if (vcall_offset)
15719    {
15720      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15721      rtx tmp = gen_rtx_REG (Pmode, 12);
15722
15723      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
15724      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15725	{
15726	  emit_insn (TARGET_32BIT
15727		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15728		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15729	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15730	}
15731      else
15732	{
15733	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15734
15735	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15736	}
15737      emit_insn (TARGET_32BIT
15738		 ? gen_addsi3 (this, this, tmp)
15739		 : gen_adddi3 (this, this, tmp));
15740    }
15741
15742  /* Generate a tail call to the target function.  */
15743  if (!TREE_USED (function))
15744    {
15745      assemble_external (function);
15746      TREE_USED (function) = 1;
15747    }
15748  funexp = XEXP (DECL_RTL (function), 0);
15749  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
15750
15751#if TARGET_MACHO
15752  if (MACHOPIC_INDIRECT)
15753    funexp = machopic_indirect_call_target (funexp);
15754#endif
15755
15756  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
15757     generate sibcall RTL explicitly.  */
15758  insn = emit_call_insn (
15759	   gen_rtx_PARALLEL (VOIDmode,
15760	     gen_rtvec (4,
15761			gen_rtx_CALL (VOIDmode,
15762				      funexp, const0_rtx),
15763			gen_rtx_USE (VOIDmode, const0_rtx),
15764			gen_rtx_USE (VOIDmode,
15765				     gen_rtx_REG (SImode,
15766						  LINK_REGISTER_REGNUM)),
15767			gen_rtx_RETURN (VOIDmode))));
15768  SIBLING_CALL_P (insn) = 1;
15769  emit_barrier ();
15770
15771  /* Run just enough of rest_of_compilation to get the insns emitted.
15772     There's not really enough bulk here to make other passes such as
15773     instruction scheduling worth while.  Note that use_thunk calls
15774     assemble_start_function and assemble_end_function.  */
15775  insn = get_insns ();
15776  insn_locators_initialize ();
15777  shorten_branches (insn);
15778  final_start_function (insn, file, 1);
15779  final (insn, file, 1);
15780  final_end_function ();
15781
15782  reload_completed = 0;
15783  epilogue_completed = 0;
15784  no_new_pseudos = 0;
15785}
15786
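/* Illustrative sketch, not part of the compiler: the run-time effect
   of the thunk emitted above.  DELTA adjusts `this' by a constant;
   VCALL_OFFSET adds an offset fetched from the object's vtable, after
   which the thunk tail-calls the real function.  */
#if 0
#include <stddef.h>

static void *
thunk_adjust (void *this, ptrdiff_t delta, ptrdiff_t vcall_offset)
{
  char *p = (char *) this + delta;

  if (vcall_offset)
    {
      /* The first word of the object points to the vtable; the extra
	 adjustment is stored at VCALL_OFFSET within that vtable.  */
      char *vtable = *(char **) p;
      p += *(ptrdiff_t *) (vtable + vcall_offset);
    }
  return p;	/* then jump (not call) to FUNCTION with this `this' */
}
#endif
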
15787/* A quick summary of the various types of 'constant-pool tables'
15788   under PowerPC:
15789
15790   Target	Flags		Name		One table per
15791   AIX		(none)		AIX TOC		object file
15792   AIX		-mfull-toc	AIX TOC		object file
15793   AIX		-mminimal-toc	AIX minimal TOC	translation unit
15794   SVR4/EABI	(none)		SVR4 SDATA	object file
15795   SVR4/EABI	-fpic		SVR4 pic	object file
15796   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
15797   SVR4/EABI	-mrelocatable	EABI TOC	function
15798   SVR4/EABI	-maix		AIX TOC		object file
15799   SVR4/EABI	-maix -mminimal-toc
15800				AIX minimal TOC	translation unit
15801
15802   Name			Reg.	Set by	entries	      contains:
15803					made by	 addrs?	fp?	sum?
15804
15805   AIX TOC		2	crt0	as	 Y	option	option
15806   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
15807   SVR4 SDATA		13	crt0	gcc	 N	Y	N
15808   SVR4 pic		30	prolog	ld	 Y	not yet	N
15809   SVR4 PIC		30	prolog	gcc	 Y	option	option
15810   EABI TOC		30	prolog	gcc	 Y	option	option
15811
15812*/
15813
15814/* Hash functions for the hash table.  */
15815
15816static unsigned
15817rs6000_hash_constant (rtx k)
15818{
15819  enum rtx_code code = GET_CODE (k);
15820  enum machine_mode mode = GET_MODE (k);
15821  unsigned result = (code << 3) ^ mode;
15822  const char *format;
15823  int flen, fidx;
15824
15825  format = GET_RTX_FORMAT (code);
15826  flen = strlen (format);
15827  fidx = 0;
15828
15829  switch (code)
15830    {
15831    case LABEL_REF:
15832      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15833
15834    case CONST_DOUBLE:
15835      if (mode != VOIDmode)
15836	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15837      flen = 2;
15838      break;
15839
15840    case CODE_LABEL:
15841      fidx = 3;
15842      break;
15843
15844    default:
15845      break;
15846    }
15847
15848  for (; fidx < flen; fidx++)
15849    switch (format[fidx])
15850      {
15851      case 's':
15852	{
15853	  unsigned i, len;
15854	  const char *str = XSTR (k, fidx);
15855	  len = strlen (str);
15856	  result = result * 613 + len;
15857	  for (i = 0; i < len; i++)
15858	    result = result * 613 + (unsigned) str[i];
15859	  break;
15860	}
15861      case 'u':
15862      case 'e':
15863	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15864	break;
15865      case 'i':
15866      case 'n':
15867	result = result * 613 + (unsigned) XINT (k, fidx);
15868	break;
15869      case 'w':
15870	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15871	  result = result * 613 + (unsigned) XWINT (k, fidx);
15872	else
15873	  {
15874	    size_t i;
15875	    for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
15876	      result = result * 613 + (unsigned) (XWINT (k, fidx)
15877						  >> CHAR_BIT * i);
15878	  }
15879	break;
15880      case '0':
15881	break;
15882      default:
15883	gcc_unreachable ();
15884      }
15885
15886  return result;
15887}
15888
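/* Illustrative sketch, not part of the compiler: the multiplicative
   hash above, applied to a plain string.  613 and 1231 are the same
   odd multipliers rs6000_hash_constant uses for its fields.  */
#if 0
#include <stdio.h>
#include <string.h>

static unsigned
hash_string (const char *str)
{
  unsigned result = 0;
  unsigned i, len = strlen (str);

  result = result * 613 + len;
  for (i = 0; i < len; i++)
    result = result * 613 + (unsigned) str[i];
  return result;
}

int
main (void)
{
  printf ("%u\n", hash_string ("toc_entry"));
  return 0;
}
#endif
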
15889static unsigned
15890toc_hash_function (const void *hash_entry)
15891{
15892  const struct toc_hash_struct *thc =
15893    (const struct toc_hash_struct *) hash_entry;
15894  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
15895}
15896
15897/* Compare H1 and H2 for equivalence.  */
15898
15899static int
15900toc_hash_eq (const void *h1, const void *h2)
15901{
15902  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15903  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15904
15905  if (((const struct toc_hash_struct *) h1)->key_mode
15906      != ((const struct toc_hash_struct *) h2)->key_mode)
15907    return 0;
15908
15909  return rtx_equal_p (r1, r2);
15910}
15911
15912/* These are the names given by the C++ front-end to vtables, and
15913   vtable-like objects.  Ideally, this logic should not be here;
15914   instead, there should be some programmatic way of inquiring as
15915   to whether or not an object is a vtable.  */
15916
15917#define VTABLE_NAME_P(NAME)				\
15918  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
15919  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
15920  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
15921  || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
15922  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
15923
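/* Illustrative sketch, not part of the compiler: a standalone version
   of VTABLE_NAME_P.  _ZTV, _ZTT, _ZTI and _ZTC are the Itanium C++
   ABI prefixes for vtables, VTTs, typeinfo objects and construction
   vtables; "_vt." is the older g++ mangling.  */
#if 0
#include <stdio.h>
#include <string.h>

static int
vtable_name_p (const char *name)
{
  return (strncmp ("_vt.", name, 4) == 0
	  || strncmp ("_ZTV", name, 4) == 0
	  || strncmp ("_ZTT", name, 4) == 0
	  || strncmp ("_ZTI", name, 4) == 0
	  || strncmp ("_ZTC", name, 4) == 0);
}

int
main (void)
{
  printf ("%d\n", vtable_name_p ("_ZTV3Foo"));	/* 1: vtable for Foo */
  printf ("%d\n", vtable_name_p ("_Z3barv"));	/* 0: ordinary symbol */
  return 0;
}
#endif
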
15924void
15925rs6000_output_symbol_ref (FILE *file, rtx x)
15926{
15927  /* Currently C++ toc references to vtables can be emitted before it
15928     is decided whether the vtable is public or private.  If this is
15929     the case, then the linker will eventually complain that there is
15930     a reference to an unknown section.  Thus, for vtables only,
15931     we emit the TOC reference to reference the symbol and not the
15932     section.  */
15933  const char *name = XSTR (x, 0);
15934
15935  if (VTABLE_NAME_P (name))
15936    {
15937      RS6000_OUTPUT_BASENAME (file, name);
15938    }
15939  else
15940    assemble_name (file, name);
15941}
15942
15943/* Output a TOC entry.  We derive the entry name from what is being
15944   written.  */
15945
15946void
15947output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
15948{
15949  char buf[256];
15950  const char *name = buf;
15951  const char *real_name;
15952  rtx base = x;
15953  HOST_WIDE_INT offset = 0;
15954
15955  gcc_assert (!TARGET_NO_TOC);
15956
15957  /* When the linker won't eliminate them, don't output duplicate
15958     TOC entries (this happens on AIX if there is any kind of TOC,
15959     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
15960     CODE_LABELs.  */
15961  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
15962    {
15963      struct toc_hash_struct *h;
15964      void * * found;
15965
15966      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
15967	 time because GGC is not initialized at that point.  */
15968      if (toc_hash_table == NULL)
15969	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
15970					  toc_hash_eq, NULL);
15971
15972      h = ggc_alloc (sizeof (*h));
15973      h->key = x;
15974      h->key_mode = mode;
15975      h->labelno = labelno;
15976
15977      found = htab_find_slot (toc_hash_table, h, 1);
15978      if (*found == NULL)
15979	*found = h;
15980      else  /* This is indeed a duplicate.
15981	       Set this label equal to that label.  */
15982	{
15983	  fputs ("\t.set ", file);
15984	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15985	  fprintf (file, "%d,", labelno);
15986	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15987	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
15988					      found)->labelno));
15989	  return;
15990	}
15991    }
15992
15993  /* If we're going to put a double constant in the TOC, make sure it's
15994     aligned properly when strict alignment is on.  */
15995  if (GET_CODE (x) == CONST_DOUBLE
15996      && STRICT_ALIGNMENT
15997      && GET_MODE_BITSIZE (mode) >= 64
15998      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC))
15999    ASM_OUTPUT_ALIGN (file, 3);
16001
16002  (*targetm.asm_out.internal_label) (file, "LC", labelno);
16003
16004  /* Handle FP constants specially.  Note that if we have a minimal
16005     TOC, things we put here aren't actually in the TOC, so we can allow
16006     FP constants.  */
16007  if (GET_CODE (x) == CONST_DOUBLE
16008      && (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
16009    {
16010      REAL_VALUE_TYPE rv;
16011      long k[4];
16012
16013      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16014      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16015	REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16016      else
16017	REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
16018
16019      if (TARGET_64BIT)
16020	{
16021	  if (TARGET_MINIMAL_TOC)
16022	    fputs (DOUBLE_INT_ASM_OP, file);
16023	  else
16024	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16025		     k[0] & 0xffffffff, k[1] & 0xffffffff,
16026		     k[2] & 0xffffffff, k[3] & 0xffffffff);
16027	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16028		   k[0] & 0xffffffff, k[1] & 0xffffffff,
16029		   k[2] & 0xffffffff, k[3] & 0xffffffff);
16030	  return;
16031	}
16032      else
16033	{
16034	  if (TARGET_MINIMAL_TOC)
16035	    fputs ("\t.long ", file);
16036	  else
16037	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16038		     k[0] & 0xffffffff, k[1] & 0xffffffff,
16039		     k[2] & 0xffffffff, k[3] & 0xffffffff);
16040	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16041		   k[0] & 0xffffffff, k[1] & 0xffffffff,
16042		   k[2] & 0xffffffff, k[3] & 0xffffffff);
16043	  return;
16044	}
16045    }
16046  else if (GET_CODE (x) == CONST_DOUBLE
16047	   && (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
16048    {
16049      REAL_VALUE_TYPE rv;
16050      long k[2];
16051
16052      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16053
16054      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16055	REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16056      else
16057	REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
16058
16059      if (TARGET_64BIT)
16060	{
16061	  if (TARGET_MINIMAL_TOC)
16062	    fputs (DOUBLE_INT_ASM_OP, file);
16063	  else
16064	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16065		     k[0] & 0xffffffff, k[1] & 0xffffffff);
16066	  fprintf (file, "0x%lx%08lx\n",
16067		   k[0] & 0xffffffff, k[1] & 0xffffffff);
16068	  return;
16069	}
16070      else
16071	{
16072	  if (TARGET_MINIMAL_TOC)
16073	    fputs ("\t.long ", file);
16074	  else
16075	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16076		     k[0] & 0xffffffff, k[1] & 0xffffffff);
16077	  fprintf (file, "0x%lx,0x%lx\n",
16078		   k[0] & 0xffffffff, k[1] & 0xffffffff);
16079	  return;
16080	}
16081    }
16082  else if (GET_CODE (x) == CONST_DOUBLE
16083	   && (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
16084    {
16085      REAL_VALUE_TYPE rv;
16086      long l;
16087
16088      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
16089      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16090	REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16091      else
16092	REAL_VALUE_TO_TARGET_SINGLE (rv, l);
16093
16094      if (TARGET_64BIT)
16095	{
16096	  if (TARGET_MINIMAL_TOC)
16097	    fputs (DOUBLE_INT_ASM_OP, file);
16098	  else
16099	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16100	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
16101	  return;
16102	}
16103      else
16104	{
16105	  if (TARGET_MINIMAL_TOC)
16106	    fputs ("\t.long ", file);
16107	  else
16108	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16109	  fprintf (file, "0x%lx\n", l & 0xffffffff);
16110	  return;
16111	}
16112    }
16113  else if (GET_MODE (x) == VOIDmode
16114	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
16115    {
16116      unsigned HOST_WIDE_INT low;
16117      HOST_WIDE_INT high;
16118
16119      if (GET_CODE (x) == CONST_DOUBLE)
16120	{
16121	  low = CONST_DOUBLE_LOW (x);
16122	  high = CONST_DOUBLE_HIGH (x);
16123	}
16124      else
16125#if HOST_BITS_PER_WIDE_INT == 32
16126	{
16127	  low = INTVAL (x);
16128	  high = (low & 0x80000000) ? ~0 : 0;
16129	}
16130#else
16131	{
16132	  low = INTVAL (x) & 0xffffffff;
16133	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
16134	}
16135#endif
16136
16137      /* TOC entries are always Pmode-sized, but since this
16138	 is a big-endian machine, smaller integer constants put
16139	 in the TOC have to be padded.
16140	 (This is still a win over putting the constants in
16141	 a separate constant pool, because then we'd have
16142	 to have both a TOC entry _and_ the actual constant.)
16143
16144	 For a 32-bit target, CONST_INT values are loaded and shifted
16145	 entirely within `low' and can be stored in one TOC entry.  */
16146
16147      /* It would be easy to make this work, but it doesn't now.  */
16148      gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
16149
16150      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
16151	{
16152#if HOST_BITS_PER_WIDE_INT == 32
16153	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16154			 POINTER_SIZE, &low, &high, 0);
16155#else
16156	  low |= high << 32;
16157	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16158	  high = (HOST_WIDE_INT) low >> 32;
16159	  low &= 0xffffffff;
16160#endif
16161	}
16162
16163      if (TARGET_64BIT)
16164	{
16165	  if (TARGET_MINIMAL_TOC)
16166	    fputs (DOUBLE_INT_ASM_OP, file);
16167	  else
16168	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16169		     (long) high & 0xffffffff, (long) low & 0xffffffff);
16170	  fprintf (file, "0x%lx%08lx\n",
16171		   (long) high & 0xffffffff, (long) low & 0xffffffff);
16172	  return;
16173	}
16174      else
16175	{
16176	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16177	    {
16178	      if (TARGET_MINIMAL_TOC)
16179		fputs ("\t.long ", file);
16180	      else
16181		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16182			 (long) high & 0xffffffff, (long) low & 0xffffffff);
16183	      fprintf (file, "0x%lx,0x%lx\n",
16184		       (long) high & 0xffffffff, (long) low & 0xffffffff);
16185	    }
16186	  else
16187	    {
16188	      if (TARGET_MINIMAL_TOC)
16189		fputs ("\t.long ", file);
16190	      else
16191		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16192	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
16193	    }
16194	  return;
16195	}
16196    }
16197
16198  if (GET_CODE (x) == CONST)
16199    {
16200      gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
16201
16202      base = XEXP (XEXP (x, 0), 0);
16203      offset = INTVAL (XEXP (XEXP (x, 0), 1));
16204    }
16205
16206  switch (GET_CODE (base))
16207    {
16208    case SYMBOL_REF:
16209      name = XSTR (base, 0);
16210      break;
16211
16212    case LABEL_REF:
16213      ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16214				   CODE_LABEL_NUMBER (XEXP (base, 0)));
16215      break;
16216
16217    case CODE_LABEL:
16218      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16219      break;
16220
16221    default:
16222      gcc_unreachable ();
16223    }
16224
16225  real_name = (*targetm.strip_name_encoding) (name);
16226  if (TARGET_MINIMAL_TOC)
16227    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
16228  else
16229    {
16230      fprintf (file, "\t.tc %s", real_name);
16231
16232      if (offset < 0)
16233	fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
16234      else if (offset)
16235	fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
16236
16237      fputs ("[TC],", file);
16238    }
16239
16240  /* Currently C++ toc references to vtables can be emitted before it
16241     is decided whether the vtable is public or private.  If this is
16242     the case, then the linker will eventually complain that there is
16243     a TOC reference to an unknown section.  Thus, for vtables only,
16244     we emit the TOC reference to reference the symbol and not the
16245     section.  */
16246  if (VTABLE_NAME_P (name))
16247    {
16248      RS6000_OUTPUT_BASENAME (file, name);
16249      if (offset < 0)
16250	fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
16251      else if (offset > 0)
16252	fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
16253    }
16254  else
16255    output_addr_const (file, x);
16256  putc ('\n', file);
16257}
16258
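/* Illustrative sketch, not part of the compiler: the two 32-bit words
   behind a DFmode TOC entry like the ones output_toc writes above.
   The bit pattern is the IEEE double for 1.5, split big-endian-style
   into high and low words.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long k0 = 0x3ff80000UL;	/* high word of 1.5 */
  unsigned long k1 = 0x00000000UL;	/* low word of 1.5 */

  /* 32-bit target, full TOC: name the entry after its value and
     emit both words, as in the DFmode branch of output_toc.  */
  printf ("\t.tc FD_%lx_%lx[TC],0x%lx,0x%lx\n",
	  k0 & 0xffffffff, k1 & 0xffffffff,
	  k0 & 0xffffffff, k1 & 0xffffffff);
  return 0;
}
#endif
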
16259/* Output an assembler pseudo-op to write an ASCII string of N characters
16260   starting at P to FILE.
16261
16262   On the RS/6000, we have to do this using the .byte operation and
16263   write out special characters outside the quoted string.
16264   Also, the assembler is broken; very long strings are truncated,
16265   so we must artificially break them up early.  */
16266
16267void
16268output_ascii (FILE *file, const char *p, int n)
16269{
16270  char c;
16271  int i, count_string;
16272  const char *for_string = "\t.byte \"";
16273  const char *for_decimal = "\t.byte ";
16274  const char *to_close = NULL;
16275
16276  count_string = 0;
16277  for (i = 0; i < n; i++)
16278    {
16279      c = *p++;
16280      if (c >= ' ' && c < 0177)
16281	{
16282	  if (for_string)
16283	    fputs (for_string, file);
16284	  putc (c, file);
16285
16286	  /* Write two quotes to get one.  */
16287	  if (c == '"')
16288	    {
16289	      putc (c, file);
16290	      ++count_string;
16291	    }
16292
16293	  for_string = NULL;
16294	  for_decimal = "\"\n\t.byte ";
16295	  to_close = "\"\n";
16296	  ++count_string;
16297
16298	  if (count_string >= 512)
16299	    {
16300	      fputs (to_close, file);
16301
16302	      for_string = "\t.byte \"";
16303	      for_decimal = "\t.byte ";
16304	      to_close = NULL;
16305	      count_string = 0;
16306	    }
16307	}
16308      else
16309	{
16310	  if (for_decimal)
16311	    fputs (for_decimal, file);
16312	  fprintf (file, "%d", c);
16313
16314	  for_string = "\n\t.byte \"";
16315	  for_decimal = ", ";
16316	  to_close = "\n";
16317	  count_string = 0;
16318	}
16319    }
16320
16321  /* Now close the string if we have written one.  Then end the line.  */
16322  if (to_close)
16323    fputs (to_close, file);
16324}
16325
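/* Illustrative sketch, not part of the compiler: what output_ascii
   above produces for a short mixed string.  Printable characters are
   kept inside one quoted .byte string (with '"' doubled), anything
   else becomes a decimal .byte, and runs are broken near 512
   characters to stay under the assembler's string-length limit.  */
#if 0
#include <stdio.h>

int
main (void)
{
  output_ascii (stdout, "ab\"c\n", 5);
  /* Emits:
	.byte "ab""c"
	.byte 10
  */
  return 0;
}
#endif
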
16326/* Generate a unique section name for FILENAME for a section type
16327   represented by SECTION_DESC.  Output goes into BUF.
16328
16329   SECTION_DESC can be any string, as long as it is different for each
16330   possible section type.
16331
16332   We name the section in the same manner as xlc.  The name begins with an
16333   underscore followed by the filename (after stripping any leading directory
16334   names) with the last period replaced by the string SECTION_DESC.  If
16335   FILENAME does not contain a period, SECTION_DESC is appended to the end of
16336   the name.  */
16337
16338void
16339rs6000_gen_section_name (char **buf, const char *filename,
16340			 const char *section_desc)
16341{
16342  const char *q, *after_last_slash, *last_period = 0;
16343  char *p;
16344  int len;
16345
16346  after_last_slash = filename;
16347  for (q = filename; *q; q++)
16348    {
16349      if (*q == '/')
16350	after_last_slash = q + 1;
16351      else if (*q == '.')
16352	last_period = q;
16353    }
16354
16355  len = strlen (after_last_slash) + strlen (section_desc) + 2;
16356  *buf = (char *) xmalloc (len);
16357
16358  p = *buf;
16359  *p++ = '_';
16360
16361  for (q = after_last_slash; *q; q++)
16362    {
16363      if (q == last_period)
16364	{
16365	  strcpy (p, section_desc);
16366	  p += strlen (section_desc);
16367	  break;
16368	}
16369
16370      else if (ISALNUM (*q))
16371	*p++ = *q;
16372    }
16373
16374  if (last_period == 0)
16375    strcpy (p, section_desc);
16376  else
16377    *p = '\0';
16378}
16379
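/* Illustrative sketch, not part of the compiler: the naming scheme
   described above.  The last period of the basename is replaced by
   SECTION_DESC and non-alphanumeric characters are dropped.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  char *buf;

  rs6000_gen_section_name (&buf, "src/foo.c", "_bss");
  printf ("%s\n", buf);		/* prints "_foo_bss" */
  free (buf);
  return 0;
}
#endif
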
16380/* Emit profile function.  */
16381
16382void
16383output_profile_hook (int labelno ATTRIBUTE_UNUSED)
16384{
16385  /* Non-standard profiling for kernels, which just saves LR then calls
16386     _mcount without worrying about arg saves.  The idea is to change
16387     the function prologue as little as possible as it isn't easy to
16388     account for arg save/restore code added just for _mcount.  */
16389  if (TARGET_PROFILE_KERNEL)
16390    return;
16391
16392  if (DEFAULT_ABI == ABI_AIX)
16393    {
16394#ifndef NO_PROFILE_COUNTERS
16395# define NO_PROFILE_COUNTERS 0
16396#endif
16397      if (NO_PROFILE_COUNTERS)
16398	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16399      else
16400	{
16401	  char buf[30];
16402	  const char *label_name;
16403	  rtx fun;
16404
16405	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16406	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16407	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
16408
16409	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16410			     fun, Pmode);
16411	}
16412    }
16413  else if (DEFAULT_ABI == ABI_DARWIN)
16414    {
16415      const char *mcount_name = RS6000_MCOUNT;
16416      int caller_addr_regno = LINK_REGISTER_REGNUM;
16417
16418      /* Be conservative and always set this, at least for now.  */
16419      current_function_uses_pic_offset_table = 1;
16420
16421#if TARGET_MACHO
16422      /* For PIC code, set up a stub and collect the caller's address
16423	 from r0, which is where the prologue puts it.  */
16424      if (MACHOPIC_INDIRECT
16425	  && current_function_uses_pic_offset_table)
16426	caller_addr_regno = 0;
16427#endif
16428      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16429			 0, VOIDmode, 1,
16430			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16431    }
16432}
16433
16434/* Write function profiler code.  */
16435
16436void
16437output_function_profiler (FILE *file, int labelno)
16438{
16439  char buf[100];
16440
16441  switch (DEFAULT_ABI)
16442    {
16443    default:
16444      gcc_unreachable ();
16445
16446    case ABI_V4:
16447      if (!TARGET_32BIT)
16448	{
16449	  warning (0, "no profiling of 64-bit code for this ABI");
16450	  return;
16451	}
16452      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16453      fprintf (file, "\tmflr %s\n", reg_names[0]);
16454      if (NO_PROFILE_COUNTERS)
16455	{
16456	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16457		       reg_names[0], reg_names[1]);
16458	}
16459      else if (TARGET_SECURE_PLT && flag_pic)
16460	{
16461	  asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16462		       reg_names[0], reg_names[1]);
16463	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16464	  asm_fprintf (file, "\t{cau|addis} %s,%s,",
16465		       reg_names[12], reg_names[12]);
16466	  assemble_name (file, buf);
16467	  asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16468	  assemble_name (file, buf);
16469	  asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16470	}
16471      else if (flag_pic == 1)
16472	{
16473	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
16474	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16475		       reg_names[0], reg_names[1]);
16476	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16477	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
16478	  assemble_name (file, buf);
16479	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
16480	}
16481      else if (flag_pic > 1)
16482	{
16483	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16484		       reg_names[0], reg_names[1]);
16485	  /* Now, we need to get the address of the label.  */
16486	  fputs ("\tbcl 20,31,1f\n\t.long ", file);
16487	  assemble_name (file, buf);
16488	  fputs ("-.\n1:", file);
16489	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
16490	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
16491		       reg_names[0], reg_names[11]);
16492	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16493		       reg_names[0], reg_names[0], reg_names[11]);
16494	}
16495      else
16496	{
16497	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
16498	  assemble_name (file, buf);
16499	  fputs ("@ha\n", file);
16500	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16501		       reg_names[0], reg_names[1]);
16502	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
16503	  assemble_name (file, buf);
16504	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
16505	}
16506
16507      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
16508      fprintf (file, "\tbl %s%s\n",
16509	       RS6000_MCOUNT, flag_pic ? "@plt" : "");
16510      break;
16511
16512    case ABI_AIX:
16513    case ABI_DARWIN:
16514      if (!TARGET_PROFILE_KERNEL)
16515	{
16516	  /* Don't do anything, done in output_profile_hook ().  */
16517	}
16518      else
16519	{
16520	  gcc_assert (!TARGET_32BIT);
16521
16522	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16523	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16524
16525	  if (cfun->static_chain_decl != NULL)
16526	    {
16527	      asm_fprintf (file, "\tstd %s,24(%s)\n",
16528			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16529	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16530	      asm_fprintf (file, "\tld %s,24(%s)\n",
16531			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16532	    }
16533	  else
16534	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16535	}
16536      break;
16537    }
16538}
16539
16540
16541/* Power4 load update and store update instructions are cracked into a
16542   load or store and an integer insn which are executed in the same cycle.
16543   Branches have their own dispatch slot which does not count against the
16544   GCC issue rate, but it changes the program flow so there are no other
16545   instructions to issue in this cycle.  */
16546
16547static int
16548rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16549		       int verbose ATTRIBUTE_UNUSED,
16550		       rtx insn, int more)
16551{
16552  if (GET_CODE (PATTERN (insn)) == USE
16553      || GET_CODE (PATTERN (insn)) == CLOBBER)
16554    return more;
16555
16556  if (rs6000_sched_groups)
16557    {
16558      if (is_microcoded_insn (insn))
16559	return 0;
16560      else if (is_cracked_insn (insn))
16561	return more > 2 ? more - 2 : 0;
16562    }
16563
16564  return more - 1;
16565}
16566
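/* Illustrative sketch, not part of the compiler: the slot accounting
   rs6000_variable_issue performs above on dispatch-group machines.
   A microcoded insn ends the group, a cracked insn costs two slots,
   anything else costs one.  */
#if 0
static int
slots_left_after_issue (int more, int microcoded, int cracked)
{
  if (microcoded)
    return 0;				/* insn ends the dispatch group */
  if (cracked)
    return more > 2 ? more - 2 : 0;	/* two internal operations */
  return more - 1;			/* ordinary insn: one slot */
}
#endif
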
16567/* Adjust the cost of a scheduling dependency.  Return the new cost of
16568   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
16569
16570static int
16571rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
16572{
16573  if (! recog_memoized (insn))
16574    return 0;
16575
16576  if (REG_NOTE_KIND (link) != 0)
16577    return 0;
16578
16579  if (REG_NOTE_KIND (link) == 0)
16580    {
16581      /* Data dependency; DEP_INSN writes a register that INSN reads
16582	 some cycles later.  */
16583
16584      /* Separate a load from a narrower, dependent store.  */
16585      if (rs6000_sched_groups
16586	  && GET_CODE (PATTERN (insn)) == SET
16587	  && GET_CODE (PATTERN (dep_insn)) == SET
16588	  && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16589	  && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16590	  && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16591	      > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16592	return cost + 14;
16593
16594      switch (get_attr_type (insn))
16595	{
16596	case TYPE_JMPREG:
16597	  /* Tell the first scheduling pass about the latency between
16598	     a mtctr and bctr (and mtlr and br/blr).  The first
16599	     scheduling pass will not know about this latency since
16600	     the mtctr instruction, which has the latency associated
16601	     with it, will be generated by reload.  */
16602	  return TARGET_POWER ? 5 : 4;
16603	case TYPE_BRANCH:
16604	  /* Leave some extra cycles between a compare and its
16605	     dependent branch, to inhibit expensive mispredicts.  */
16606	  if ((rs6000_cpu_attr == CPU_PPC603
16607	       || rs6000_cpu_attr == CPU_PPC604
16608	       || rs6000_cpu_attr == CPU_PPC604E
16609	       || rs6000_cpu_attr == CPU_PPC620
16610	       || rs6000_cpu_attr == CPU_PPC630
16611	       || rs6000_cpu_attr == CPU_PPC750
16612	       || rs6000_cpu_attr == CPU_PPC7400
16613	       || rs6000_cpu_attr == CPU_PPC7450
16614	       || rs6000_cpu_attr == CPU_POWER4
16615	       || rs6000_cpu_attr == CPU_POWER5)
16616	      && recog_memoized (dep_insn)
16617	      && (INSN_CODE (dep_insn) >= 0)
16618	      && (get_attr_type (dep_insn) == TYPE_CMP
16619		  || get_attr_type (dep_insn) == TYPE_COMPARE
16620		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
16621		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16622		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
16623		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
16624		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16625		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
16626	    return cost + 2;
16627	default:
16628	  break;
16629	}
16630      /* Fall out to return default cost.  */
16631    }
16632
16633  return cost;
16634}
16635
16636/* Return true if INSN is microcoded by the processor;
16637   return false otherwise.  */
16638
16639static bool
16640is_microcoded_insn (rtx insn)
16641{
16642  if (!insn || !INSN_P (insn)
16643      || GET_CODE (PATTERN (insn)) == USE
16644      || GET_CODE (PATTERN (insn)) == CLOBBER)
16645    return false;
16646
16647  if (rs6000_sched_groups)
16648    {
16649      enum attr_type type = get_attr_type (insn);
16650      if (type == TYPE_LOAD_EXT_U
16651	  || type == TYPE_LOAD_EXT_UX
16652	  || type == TYPE_LOAD_UX
16653	  || type == TYPE_STORE_UX
16654	  || type == TYPE_MFCR)
16655	return true;
16656    }
16657
16658  return false;
16659}
16660
16661/* The function returns a nonzero value if INSN can be scheduled only
16662   as the first insn in a dispatch group ("dispatch-slot restricted").
16663   In this case, the returned value indicates how many dispatch slots
16664   the insn occupies (at the beginning of the group).
16665   Return 0 otherwise.  */
16666
16667static int
16668is_dispatch_slot_restricted (rtx insn)
16669{
16670  enum attr_type type;
16671
16672  if (!rs6000_sched_groups)
16673    return 0;
16674
16675  if (!insn
16677      || GET_CODE (insn) == NOTE
16678      || GET_CODE (PATTERN (insn)) == USE
16679      || GET_CODE (PATTERN (insn)) == CLOBBER)
16680    return 0;
16681
16682  type = get_attr_type (insn);
16683
16684  switch (type)
16685    {
16686    case TYPE_MFCR:
16687    case TYPE_MFCRF:
16688    case TYPE_MTCR:
16689    case TYPE_DELAYED_CR:
16690    case TYPE_CR_LOGICAL:
16691    case TYPE_MTJMPR:
16692    case TYPE_MFJMPR:
16693      return 1;
16694    case TYPE_IDIV:
16695    case TYPE_LDIV:
16696      return 2;
16697    case TYPE_LOAD_L:
16698    case TYPE_STORE_C:
16699    case TYPE_ISYNC:
16700    case TYPE_SYNC:
16701      return 4;
16702    default:
16703      if (rs6000_cpu == PROCESSOR_POWER5
16704	  && is_cracked_insn (insn))
16705	return 2;
16706      return 0;
16707    }
16708}
16709
16710/* The function returns true if INSN is cracked into 2 instructions
16711   by the processor (and therefore occupies 2 issue slots).  */
16712
16713static bool
16714is_cracked_insn (rtx insn)
16715{
16716  if (!insn || !INSN_P (insn)
16717      || GET_CODE (PATTERN (insn)) == USE
16718      || GET_CODE (PATTERN (insn)) == CLOBBER)
16719    return false;
16720
16721  if (rs6000_sched_groups)
16722    {
16723      enum attr_type type = get_attr_type (insn);
16724      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
16725	  || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16726	  || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16727	  || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16728	  || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16729	  || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16730	  || type == TYPE_IDIV || type == TYPE_LDIV
16731	  || type == TYPE_INSERT_WORD)
16732	return true;
16733    }
16734
16735  return false;
16736}
16737
16738/* The function returns true if INSN can be issued only from
16739   the branch slot.  */
16740
16741static bool
16742is_branch_slot_insn (rtx insn)
16743{
16744  if (!insn || !INSN_P (insn)
16745      || GET_CODE (PATTERN (insn)) == USE
16746      || GET_CODE (PATTERN (insn)) == CLOBBER)
16747    return false;
16748
16749  if (rs6000_sched_groups)
16750    {
16751      enum attr_type type = get_attr_type (insn);
16752      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
16753	return true;
16754      return false;
16755    }
16756
16757  return false;
16758}
16759
16760/* Update the integer scheduling priority INSN_PRIORITY (INSN).
16761   Increase the priority to execute INSN earlier; reduce the
16762   priority to execute INSN later.  Do not define this hook if
16763   you do not need to adjust the scheduling priorities of
16764   insns.  */
16765
16766static int
16767rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
16768{
16769  /* On machines (like the 750) which have asymmetric integer units,
16770     where one integer unit can do multiply and divides and the other
16771     can't, reduce the priority of multiply/divide so it is scheduled
16772     before other integer operations.  */
16773
16774#if 0
16775  if (! INSN_P (insn))
16776    return priority;
16777
16778  if (GET_CODE (PATTERN (insn)) == USE)
16779    return priority;
16780
16781  switch (rs6000_cpu_attr) {
16782  case CPU_PPC750:
16783    switch (get_attr_type (insn))
16784      {
16785      default:
16786	break;
16787
16788      case TYPE_IMUL:
16789      case TYPE_IDIV:
16790	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16791		 priority, priority);
16792	if (priority >= 0 && priority < 0x01000000)
16793	  priority >>= 3;
16794	break;
16795      }
16796  }
16797#endif
16798
16799  if (is_dispatch_slot_restricted (insn)
16800      && reload_completed
16801      && current_sched_info->sched_max_insns_priority
16802      && rs6000_sched_restricted_insns_priority)
16803    {
16804
16805      /* Prioritize insns that can be dispatched only in the first
16806	 dispatch slot.  */
16807      if (rs6000_sched_restricted_insns_priority == 1)
16808	/* Attach highest priority to insn. This means that in
16809	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16810	   precede 'priority' (critical path) considerations.  */
16811	return current_sched_info->sched_max_insns_priority;
16812      else if (rs6000_sched_restricted_insns_priority == 2)
16813	/* Increase priority of insn by a minimal amount. This means that in
16814	   haifa-sched.c:ready_sort(), only 'priority' (critical path)
16815	   considerations precede dispatch-slot restriction considerations.  */
16816	return (priority + 1);
16817    }
16818
16819  return priority;
16820}
16821
16822/* Return how many instructions the machine can issue per cycle.  */
16823
16824static int
16825rs6000_issue_rate (void)
16826{
16827  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
16828  if (!reload_completed)
16829    return 1;
16830
16831  switch (rs6000_cpu_attr) {
16832  case CPU_RIOS1:  /* ? */
16833  case CPU_RS64A:
16834  case CPU_PPC601: /* ? */
16835  case CPU_PPC7450:
16836    return 3;
16837  case CPU_PPC440:
16838  case CPU_PPC603:
16839  case CPU_PPC750:
16840  case CPU_PPC7400:
16841  case CPU_PPC8540:
16842    return 2;
16843  case CPU_RIOS2:
16844  case CPU_PPC604:
16845  case CPU_PPC604E:
16846  case CPU_PPC620:
16847  case CPU_PPC630:
16848    return 4;
16849  case CPU_POWER4:
16850  case CPU_POWER5:
16851    return 5;
16852  default:
16853    return 1;
16854  }
16855}
16856
16857/* Return how many instructions to look ahead for better insn
16858   scheduling.  */
16859
16860static int
16861rs6000_use_sched_lookahead (void)
16862{
16863  if (rs6000_cpu_attr == CPU_PPC8540)
16864    return 4;
16865  return 0;
16866}
16867
16868/* Determine if PAT refers to memory.  */
16869
16870static bool
16871is_mem_ref (rtx pat)
16872{
16873  const char * fmt;
16874  int i, j;
16875  bool ret = false;
16876
16877  if (GET_CODE (pat) == MEM)
16878    return true;
16879
16880  /* Recursively process the pattern.  */
16881  fmt = GET_RTX_FORMAT (GET_CODE (pat));
16882
16883  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16884    {
16885      if (fmt[i] == 'e')
16886	ret |= is_mem_ref (XEXP (pat, i));
16887      else if (fmt[i] == 'E')
16888	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16889	  ret |= is_mem_ref (XVECEXP (pat, i, j));
16890    }
16891
16892  return ret;
16893}
16894
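/* Illustrative sketch, not part of the compiler: the same recursive
   walk as is_mem_ref above, over a toy expression tree standing in
   for the RTL format letters ('e' = one subexpression, 'E' = a
   vector of them).  */
#if 0
#include <stdbool.h>

struct expr
{
  bool is_mem;			/* stands in for GET_CODE (pat) == MEM */
  int n_ops;
  struct expr **op;		/* stands in for XEXP / XVECEXP */
};

static bool
refers_to_mem (const struct expr *e)
{
  int i;

  if (e->is_mem)
    return true;
  for (i = 0; i < e->n_ops; i++)
    if (refers_to_mem (e->op[i]))
      return true;
  return false;
}
#endif
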
16895/* Determine if PAT is a PATTERN of a load insn.  */
16896
16897static bool
16898is_load_insn1 (rtx pat)
16899{
16900  if (!pat)
16901    return false;
16902
16903  if (GET_CODE (pat) == SET)
16904    return is_mem_ref (SET_SRC (pat));
16905
16906  if (GET_CODE (pat) == PARALLEL)
16907    {
16908      int i;
16909
16910      for (i = 0; i < XVECLEN (pat, 0); i++)
16911	if (is_load_insn1 (XVECEXP (pat, 0, i)))
16912	  return true;
16913    }
16914
16915  return false;
16916}
16917
16918/* Determine if INSN loads from memory.  */
16919
16920static bool
16921is_load_insn (rtx insn)
16922{
16923  if (!insn || !INSN_P (insn))
16924    return false;
16925
16926  if (GET_CODE (insn) == CALL_INSN)
16927    return false;
16928
16929  return is_load_insn1 (PATTERN (insn));
16930}
16931
16932/* Determine if PAT is a PATTERN of a store insn.  */
16933
16934static bool
16935is_store_insn1 (rtx pat)
16936{
16937  if (!pat)
16938    return false;
16939
16940  if (GET_CODE (pat) == SET)
16941    return is_mem_ref (SET_DEST (pat));
16942
16943  if (GET_CODE (pat) == PARALLEL)
16944    {
16945      int i;
16946
16947      for (i = 0; i < XVECLEN (pat, 0); i++)
16948	if (is_store_insn1 (XVECEXP (pat, 0, i)))
16949	  return true;
16950    }
16951
16952  return false;
16953}
16954
16955/* Determine if INSN stores to memory.  */
16956
16957static bool
16958is_store_insn (rtx insn)
16959{
16960  if (!insn || !INSN_P (insn))
16961    return false;
16962
16963  return is_store_insn1 (PATTERN (insn));
16964}
16965
16966/* Returns whether the dependence between INSN and NEXT is considered
16967   costly by the given target.  */
16968
16969static bool
16970rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16971			     int distance)
16972{
16973  /* If the flag is not enabled, no dependence is considered costly;
16974     allow all dependent insns in the same group.
16975     This is the most aggressive option.  */
16976  if (rs6000_sched_costly_dep == no_dep_costly)
16977    return false;
16978
16979  /* If the flag is set to 1, a dependence is always considered costly;
16980     do not allow dependent instructions in the same group.
16981     This is the most conservative option.  */
16982  if (rs6000_sched_costly_dep == all_deps_costly)
16983    return true;
16984
16985  if (rs6000_sched_costly_dep == store_to_load_dep_costly
16986      && is_load_insn (next)
16987      && is_store_insn (insn))
16988    /* Prevent load after store in the same group.  */
16989    return true;
16990
16991  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
16992      && is_load_insn (next)
16993      && is_store_insn (insn)
16994      && (!link || (int) REG_NOTE_KIND (link) == 0))
16995     /* Prevent load after store in the same group if it is a true
16996	dependence.  */
16997     return true;
16998
16999  /* The flag is set to X; dependences with latency >= X are considered costly,
17000     and will not be scheduled in the same group.  */
17001  if (rs6000_sched_costly_dep <= max_dep_latency
17002      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
17003    return true;
17004
17005  return false;
17006}
17007
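/* Illustrative sketch, not part of the compiler: the latency test at
   the end of rs6000_is_costly_dependence above.  When the flag is set
   to a numeric threshold N, a dependence whose remaining latency
   (cost - distance) is at least N keeps the two insns in separate
   dispatch groups.  */
#if 0
static int
latency_is_costly (int cost, int distance, int threshold)
{
  return (cost - distance) >= threshold;
}
/* e.g. latency_is_costly (3, 1, 2) != 0: schedule in separate groups.  */
#endif
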
17008/* Return the next insn after INSN that is found before TAIL is reached,
17009   skipping any "non-active" insns - insns that will not actually occupy
17010   an issue slot.  Return NULL_RTX if such an insn is not found.  */
17011
17012static rtx
17013get_next_active_insn (rtx insn, rtx tail)
17014{
17015  if (insn == NULL_RTX || insn == tail)
17016    return NULL_RTX;
17017
17018  while (1)
17019    {
17020      insn = NEXT_INSN (insn);
17021      if (insn == NULL_RTX || insn == tail)
17022	return NULL_RTX;
17023
17024      if (CALL_P (insn)
17025	  || JUMP_P (insn)
17026	  || (NONJUMP_INSN_P (insn)
17027	      && GET_CODE (PATTERN (insn)) != USE
17028	      && GET_CODE (PATTERN (insn)) != CLOBBER
17029	      && INSN_CODE (insn) != CODE_FOR_stack_tie))
17030	break;
17031    }
17032  return insn;
17033}
17034
17035/* Return whether the presence of INSN causes a dispatch group termination
17036   of group WHICH_GROUP.
17037
17038   If WHICH_GROUP == current_group, this function will return true if INSN
17039   causes the termination of the current group (i.e, the dispatch group to
17040   which INSN belongs). This means that INSN will be the last insn in the
17041   group it belongs to.
17042
17043   If WHICH_GROUP == previous_group, this function will return true if INSN
17044   causes the termination of the previous group (i.e, the dispatch group that
17045   precedes the group to which INSN belongs).  This means that INSN will be
17046   the first insn in the group it belongs to.  */
17047
17048static bool
17049insn_terminates_group_p (rtx insn, enum group_termination which_group)
17050{
17051  enum attr_type type;
17052
17053  if (! insn)
17054    return false;
17055
17056  type = get_attr_type (insn);
17057
17058  if (is_microcoded_insn (insn))
17059    return true;
17060
17061  if (which_group == current_group)
17062    {
17063      if (is_branch_slot_insn (insn))
17064	return true;
17065      return false;
17066    }
17067  else if (which_group == previous_group)
17068    {
17069      if (is_dispatch_slot_restricted (insn))
17070	return true;
17071      return false;
17072    }
17073
17074  return false;
17075}
17076
17077/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
17078   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */
17079
17080static bool
17081is_costly_group (rtx *group_insns, rtx next_insn)
17082{
17083  int i;
17084  rtx link;
17085  int cost;
17086  int issue_rate = rs6000_issue_rate ();
17087
17088  for (i = 0; i < issue_rate; i++)
17089    {
17090      rtx insn = group_insns[i];
17091      if (!insn)
17092	continue;
17093      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
17094	{
17095	  rtx next = XEXP (link, 0);
17096	  if (next == next_insn)
17097	    {
17098	      cost = insn_cost (insn, link, next_insn);
17099	      if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
17100		return true;
17101	    }
17102	}
17103    }
17104
17105  return false;
17106}
17107
17108/* Helper for the function redefine_groups.
17109   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
17110   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
17111   to keep it "far" (in a separate group) from GROUP_INSNS, following
17112   one of the following schemes, depending on the value of the flag
17113   -minsert_sched_nops = X:
17114   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
17115       in order to force NEXT_INSN into a separate group.
17116   (2) X < sched_finish_regroup_exact: insert exactly X nops.
17117   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
17118   insertion (has a group just ended, how many vacant issue slots remain in the
17119   last group, and how many dispatch groups were encountered so far).  */
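
/* Editorial sketch (hypothetical machine, not part of GCC): with
   issue_rate == 4 and -minsert-sched-nops=2, scheme (2) above applies
   and exactly two nops are emitted ahead of NEXT_INSN:

	add  r3,r4,r5    ; one of GROUP_INSNS; NEXT_INSN depends on r3
	nop              ; inserted, fills a non-branch slot
	nop              ; inserted; if the group fills up here,
	                 ; *GROUP_COUNT is bumped and the slots reset
	lwz  r6,0(r3)    ; NEXT_INSN, now further from the add  */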
17120
17121static int
17122force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
17123		 rtx next_insn, bool *group_end, int can_issue_more,
17124		 int *group_count)
17125{
17126  rtx nop;
17127  bool force;
17128  int issue_rate = rs6000_issue_rate ();
17129  bool end = *group_end;
17130  int i;
17131
17132  if (next_insn == NULL_RTX)
17133    return can_issue_more;
17134
17135  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
17136    return can_issue_more;
17137
17138  force = is_costly_group (group_insns, next_insn);
17139  if (!force)
17140    return can_issue_more;
17141
17142  if (sched_verbose > 6)
17143    fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
17144	     *group_count, can_issue_more);
17145
17146  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
17147    {
17148      if (*group_end)
17149	can_issue_more = 0;
17150
17151      /* Since only a branch can be issued in the last issue_slot, it is
17152	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
17153	 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
17154	 in this case the last nop will start a new group and the branch
17155	 will be forced to the new group.  */
17156      if (can_issue_more && !is_branch_slot_insn (next_insn))
17157	can_issue_more--;
17158
17159      while (can_issue_more > 0)
17160	{
17161	  nop = gen_nop ();
17162	  emit_insn_before (nop, next_insn);
17163	  can_issue_more--;
17164	}
17165
17166      *group_end = true;
17167      return 0;
17168    }
17169
17170  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
17171    {
17172      int n_nops = rs6000_sched_insert_nops;
17173
17174      /* Nops can't be issued from the branch slot, so the effective
17175	 issue_rate for nops is 'issue_rate - 1'.  */
17176      if (can_issue_more == 0)
17177	can_issue_more = issue_rate;
17178      can_issue_more--;
17179      if (can_issue_more == 0)
17180	{
17181	  can_issue_more = issue_rate - 1;
17182	  (*group_count)++;
17183	  end = true;
17184	  for (i = 0; i < issue_rate; i++)
17185	    {
17186	      group_insns[i] = 0;
17187	    }
17188	}
17189
17190      while (n_nops > 0)
17191	{
17192	  nop = gen_nop ();
17193	  emit_insn_before (nop, next_insn);
17194	  if (can_issue_more == issue_rate - 1) /* new group begins */
17195	    end = false;
17196	  can_issue_more--;
17197	  if (can_issue_more == 0)
17198	    {
17199	      can_issue_more = issue_rate - 1;
17200	      (*group_count)++;
17201	      end = true;
17202	      for (i = 0; i < issue_rate; i++)
17203		{
17204		  group_insns[i] = 0;
17205		}
17206	    }
17207	  n_nops--;
17208	}
17209
17210      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
17211      can_issue_more++;
17212
17213      /* Is next_insn going to start a new group?  */
17214      *group_end
17215	= (end
17216	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
17217	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
17218	   || (can_issue_more < issue_rate &&
17219	       insn_terminates_group_p (next_insn, previous_group)));
17220      if (*group_end && end)
17221	(*group_count)--;
17222
17223      if (sched_verbose > 6)
17224	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
17225		 *group_count, can_issue_more);
17226      return can_issue_more;
17227    }
17228
17229  return can_issue_more;
17230}
17231
17232/* This function tries to synchronize the dispatch groups that the compiler
17233   "sees" with the dispatch groups that the processor dispatcher is expected to
17234   form in practice.  It tries to achieve this synchronization by forcing the
17235   estimated processor grouping on the compiler (as opposed to the function
17236   'pad_groups', which tries to force the scheduler's grouping on the processor).
17237
17238   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
17239   examines the (estimated) dispatch groups that will be formed by the processor
17240   dispatcher.  It marks these group boundaries to reflect the estimated
17241   processor grouping, overriding the grouping that the scheduler had marked.
17242   Depending on the value of the flag '-minsert-sched-nops' this function can
17243   force certain insns into separate groups or force a certain distance between
17244   them by inserting nops, for example, if there exists a "costly dependence"
17245   between the insns.
17246
17247   The function estimates the group boundaries that the processor will form as
17248   follows:  It keeps track of how many vacant issue slots are available after
17249   each insn.  A subsequent insn will start a new group if one of the following
17250   4 cases applies:
17251   - no more vacant issue slots remain in the current dispatch group.
17252   - only the last issue slot, which is the branch slot, is vacant, but the next
17253     insn is not a branch.
17254   - at most the last two issue slots, including the branch slot, are vacant,
17255     which means that a cracked insn (which occupies two issue slots) can't be
17256     issued in this group.
17257   - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
17258     start a new group (a worked sketch follows below).  */
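
/* Editorial sketch of the cases above (hypothetical values, not part
   of GCC): suppose issue_rate == 4 and three insns have been placed,
   so can_issue_more == 1 and only the branch slot remains.  A branch
   may still join the current group, but an 'add' triggers case 2, and
   a cracked insn (one needing two slots) triggers case 3; either is
   predicted to start a new group.  */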
17259
17260static int
17261redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
17262{
17263  rtx insn, next_insn;
17264  int issue_rate;
17265  int can_issue_more;
17266  int slot, i;
17267  bool group_end;
17268  int group_count = 0;
17269  rtx *group_insns;
17270
17271  /* Initialize.  */
17272  issue_rate = rs6000_issue_rate ();
17273  group_insns = alloca (issue_rate * sizeof (rtx));
17274  for (i = 0; i < issue_rate; i++)
17275    {
17276      group_insns[i] = 0;
17277    }
17278  can_issue_more = issue_rate;
17279  slot = 0;
17280  insn = get_next_active_insn (prev_head_insn, tail);
17281  group_end = false;
17282
17283  while (insn != NULL_RTX)
17284    {
17285      slot = (issue_rate - can_issue_more);
17286      group_insns[slot] = insn;
17287      can_issue_more =
17288	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17289      if (insn_terminates_group_p (insn, current_group))
17290	can_issue_more = 0;
17291
17292      next_insn = get_next_active_insn (insn, tail);
17293      if (next_insn == NULL_RTX)
17294	return group_count + 1;
17295
17296      /* Is next_insn going to start a new group?  */
17297      group_end
17298	= (can_issue_more == 0
17299	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
17300	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
17301	   || (can_issue_more < issue_rate &&
17302	       insn_terminates_group_p (next_insn, previous_group)));
17303
17304      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
17305					next_insn, &group_end, can_issue_more,
17306					&group_count);
17307
17308      if (group_end)
17309	{
17310	  group_count++;
17311	  can_issue_more = 0;
17312	  for (i = 0; i < issue_rate; i++)
17313	    {
17314	      group_insns[i] = 0;
17315	    }
17316	}
17317
17318      if (GET_MODE (next_insn) == TImode && can_issue_more)
17319	PUT_MODE (next_insn, VOIDmode);
17320      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
17321	PUT_MODE (next_insn, TImode);
17322
17323      insn = next_insn;
17324      if (can_issue_more == 0)
17325	can_issue_more = issue_rate;
17326    } /* while */
17327
17328  return group_count;
17329}
17330
17331/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
17332   dispatch group boundaries that the scheduler had marked.  Pad with nops
17333   any dispatch groups which have vacant issue slots, in order to force the
17334   scheduler's grouping on the processor dispatcher.  The function
17335   returns the number of dispatch groups found.  */
17336
17337static int
17338pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
17339{
17340  rtx insn, next_insn;
17341  rtx nop;
17342  int issue_rate;
17343  int can_issue_more;
17344  int group_end;
17345  int group_count = 0;
17346
17347  /* Initialize issue_rate.  */
17348  issue_rate = rs6000_issue_rate ();
17349  can_issue_more = issue_rate;
17350
17351  insn = get_next_active_insn (prev_head_insn, tail);
17352  next_insn = get_next_active_insn (insn, tail);
17353
17354  while (insn != NULL_RTX)
17355    {
17356      can_issue_more =
17357	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17358
17359      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
17360
17361      if (next_insn == NULL_RTX)
17362	break;
17363
17364      if (group_end)
17365	{
17366	  /* If the scheduler had marked group termination at this location
17367	     (between insn and next_insn), and neither insn nor next_insn will
17368	     force group termination, pad the group with nops to force group
17369	     termination.  */
17370	  if (can_issue_more
17371	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17372	      && !insn_terminates_group_p (insn, current_group)
17373	      && !insn_terminates_group_p (next_insn, previous_group))
17374	    {
17375	      if (!is_branch_slot_insn (next_insn))
17376		can_issue_more--;
17377
17378	      while (can_issue_more)
17379		{
17380		  nop = gen_nop ();
17381		  emit_insn_before (nop, next_insn);
17382		  can_issue_more--;
17383		}
17384	    }
17385
17386	  can_issue_more = issue_rate;
17387	  group_count++;
17388	}
17389
17390      insn = next_insn;
17391      next_insn = get_next_active_insn (insn, tail);
17392    }
17393
17394  return group_count;
17395}
17396
17397/* The following function is called at the end of scheduling BB.
17398   After reload, it inserts nops to enforce the insn group bundling.  */
17399
17400static void
17401rs6000_sched_finish (FILE *dump, int sched_verbose)
17402{
17403  int n_groups;
17404
17405  if (sched_verbose)
17406    fprintf (dump, "=== Finishing schedule.\n");
17407
17408  if (reload_completed && rs6000_sched_groups)
17409    {
17410      if (rs6000_sched_insert_nops == sched_finish_none)
17411	return;
17412
17413      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
17414	n_groups = pad_groups (dump, sched_verbose,
17415			       current_sched_info->prev_head,
17416			       current_sched_info->next_tail);
17417      else
17418	n_groups = redefine_groups (dump, sched_verbose,
17419				    current_sched_info->prev_head,
17420				    current_sched_info->next_tail);
17421
17422      if (sched_verbose >= 6)
17423	{
17424	  fprintf (dump, "ngroups = %d\n", n_groups);
17425	  print_rtl (dump, current_sched_info->prev_head);
17426	  fprintf (dump, "Done finish_sched\n");
17427	}
17428    }
17429}
17430
17431/* Length in units of the trampoline for entering a nested function.  */
17432
17433int
17434rs6000_trampoline_size (void)
17435{
17436  int ret = 0;
17437
17438  switch (DEFAULT_ABI)
17439    {
17440    default:
17441      gcc_unreachable ();
17442
17443    case ABI_AIX:
17444      ret = (TARGET_32BIT) ? 12 : 24;
17445      break;
17446
17447    case ABI_DARWIN:
17448    case ABI_V4:
17449      ret = (TARGET_32BIT) ? 40 : 48;
17450      break;
17451    }
17452
17453  return ret;
17454}
17455
17456/* Emit RTL insns to initialize the variable parts of a trampoline.
17457   FNADDR is an RTX for the address of the function's pure code.
17458   CXT is an RTX for the static chain value for the function.  */
17459
17460void
17461rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
17462{
17463  int regsize = (TARGET_32BIT) ? 4 : 8;
17464  rtx ctx_reg = force_reg (Pmode, cxt);
17465
17466  switch (DEFAULT_ABI)
17467    {
17468    default:
17469      gcc_unreachable ();
17470
17471/* Macros to shorten the code expansions below.  */
17472#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
17473#define MEM_PLUS(addr,offset) \
17474  gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
17475
17476    /* Under AIX, just build the 3-word function descriptor.  */
17477    case ABI_AIX:
17478      {
17479	rtx fn_reg = gen_reg_rtx (Pmode);
17480	rtx toc_reg = gen_reg_rtx (Pmode);
17481	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
17482	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
17483	emit_move_insn (MEM_DEREF (addr), fn_reg);
17484	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17485	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17486      }
17487      break;
17488
17489    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
17490    case ABI_DARWIN:
17491    case ABI_V4:
17492      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
17493			 FALSE, VOIDmode, 4,
17494			 addr, Pmode,
17495			 GEN_INT (rs6000_trampoline_size ()), SImode,
17496			 fnaddr, Pmode,
17497			 ctx_reg, Pmode);
17498      break;
17499    }
17500
17501  return;
17502}
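
/* Editorial sketch, derived from the ABI_AIX case above: the
   trampoline at ADDR becomes a three-word function descriptor
   (regsize is 4 or 8 bytes):

	ADDR + 0*regsize:  entry address  (copied from *FNADDR)
	ADDR + 1*regsize:  TOC pointer    (copied from *(FNADDR + regsize))
	ADDR + 2*regsize:  static chain   (CXT)  */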
17503
17504
17505/* Table of valid machine attributes.  */
17506
17507const struct attribute_spec rs6000_attribute_table[] =
17508{
17509  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
17510  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
17511  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
17512  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
17513  { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
17514  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
17515#ifdef SUBTARGET_ATTRIBUTE_TABLE
17516  SUBTARGET_ATTRIBUTE_TABLE,
17517#endif
17518  { NULL,        0, 0, false, false, false, NULL }
17519};
17520
17521/* Handle the "altivec" attribute.  The attribute may have
17522   arguments as follows:
17523
17524	__attribute__((altivec(vector__)))
17525	__attribute__((altivec(pixel__)))	(always followed by 'unsigned short')
17526	__attribute__((altivec(bool__)))	(always followed by 'unsigned')
17527
17528  and may appear more than once (e.g., 'vector bool char') in a
17529  given declaration.  */
17530
17531static tree
17532rs6000_handle_altivec_attribute (tree *node,
17533				 tree name ATTRIBUTE_UNUSED,
17534				 tree args,
17535				 int flags ATTRIBUTE_UNUSED,
17536				 bool *no_add_attrs)
17537{
17538  tree type = *node, result = NULL_TREE;
17539  enum machine_mode mode;
17540  int unsigned_p;
17541  char altivec_type
17542    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17543	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17544       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
17545       : '?');
17546
17547  while (POINTER_TYPE_P (type)
17548	 || TREE_CODE (type) == FUNCTION_TYPE
17549	 || TREE_CODE (type) == METHOD_TYPE
17550	 || TREE_CODE (type) == ARRAY_TYPE)
17551    type = TREE_TYPE (type);
17552
17553  mode = TYPE_MODE (type);
17554
17555  /* Check for invalid AltiVec type qualifiers.  */
17556  if (type == long_unsigned_type_node || type == long_integer_type_node)
17557    {
17558    if (TARGET_64BIT)
17559      error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17560    else if (rs6000_warn_altivec_long)
17561      warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
17562    }
17563  else if (type == long_long_unsigned_type_node
17564           || type == long_long_integer_type_node)
17565    error ("use of %<long long%> in AltiVec types is invalid");
17566  else if (type == double_type_node)
17567    error ("use of %<double%> in AltiVec types is invalid");
17568  else if (type == long_double_type_node)
17569    error ("use of %<long double%> in AltiVec types is invalid");
17570  else if (type == boolean_type_node)
17571    error ("use of boolean types in AltiVec types is invalid");
17572  else if (TREE_CODE (type) == COMPLEX_TYPE)
17573    error ("use of %<complex%> in AltiVec types is invalid");
17574  else if (DECIMAL_FLOAT_MODE_P (mode))
17575    error ("use of decimal floating point types in AltiVec types is invalid");
17576
17577  switch (altivec_type)
17578    {
17579    case 'v':
17580      unsigned_p = TYPE_UNSIGNED (type);
17581      switch (mode)
17582	{
17583	case SImode:
17584	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17585	  break;
17586	case HImode:
17587	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17588	  break;
17589	case QImode:
17590	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17591	  break;
17592	case SFmode: result = V4SF_type_node; break;
17593	  /* If the user says 'vector int bool', we may be handed the 'bool'
17594	     attribute _before_ the 'vector' attribute, and so select the
17595	     proper type in the 'b' case below.  */
17596	case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17597	  result = type;
17598	default: break;
17599	}
17600      break;
17601    case 'b':
17602      switch (mode)
17603	{
17604	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17605	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17606	case QImode: case V16QImode: result = bool_V16QI_type_node;
17607	default: break;
17608	}
17609      break;
17610    case 'p':
17611      switch (mode)
17612	{
17613	case V8HImode: result = pixel_V8HI_type_node;
17614	default: break;
17615	}
17616    default: break;
17617    }
17618
17619  if (result && result != type && TYPE_READONLY (type))
17620    result = build_qualified_type (result, TYPE_QUAL_CONST);
17621
17622  *no_add_attrs = true;  /* No need to hang on to the attribute.  */
17623
17624  if (result)
17625    *node = reconstruct_complex_type (*node, result);
17626
17627  return NULL_TREE;
17628}
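
/* Editorial illustration, not from the original source: a declaration
   that reaches the handler above as

	__attribute__ ((altivec (vector__))) unsigned int v;

   hits the 'v' case with mode SImode and TYPE_UNSIGNED set, so *node
   becomes unsigned_V4SI_type_node, i.e. the 16-byte
   'vector unsigned int' type.  */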
17629
17630/* AltiVec defines four built-in scalar types that serve as vector
17631   elements; we must teach the compiler how to mangle them.  */
17632
17633static const char *
17634rs6000_mangle_fundamental_type (tree type)
17635{
17636  if (type == bool_char_type_node) return "U6__boolc";
17637  if (type == bool_short_type_node) return "U6__bools";
17638  if (type == pixel_type_node) return "u7__pixel";
17639  if (type == bool_int_type_node) return "U6__booli";
17640
17641  /* Mangle IBM extended float long double as `g' (__float128) on
17642     powerpc*-linux where long-double-64 previously was the default.  */
17643  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
17644      && TARGET_ELF
17645      && TARGET_LONG_DOUBLE_128
17646      && !TARGET_IEEEQUAD)
17647    return "g";
17648
17649  /* For all other types, use normal C++ mangling.  */
17650  return NULL;
17651}
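
/* Editorial illustration of the table above: the AltiVec element
   types embed the strings returned here (e.g. "u7__pixel"), and on
   powerpc*-linux with 128-bit IBM long double

	void f (long double);	// mangles as _Z1fg instead of _Z1fe

   since 'g' replaces the default 'e' for long double.  */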
17652
17653/* Handle a "longcall" or "shortcall" attribute; arguments as in
17654   struct attribute_spec.handler.  */
17655
17656static tree
17657rs6000_handle_longcall_attribute (tree *node, tree name,
17658				  tree args ATTRIBUTE_UNUSED,
17659				  int flags ATTRIBUTE_UNUSED,
17660				  bool *no_add_attrs)
17661{
17662  if (TREE_CODE (*node) != FUNCTION_TYPE
17663      && TREE_CODE (*node) != FIELD_DECL
17664      && TREE_CODE (*node) != TYPE_DECL)
17665    {
17666      warning (OPT_Wattributes, "%qs attribute only applies to functions",
17667	       IDENTIFIER_POINTER (name));
17668      *no_add_attrs = true;
17669    }
17670
17671  return NULL_TREE;
17672}
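
/* Editorial illustration, not from the original source: the attribute
   handled above is typically written

	void far_away (void) __attribute__ ((longcall));

   after which calls to far_away are emitted through a register (see
   rs6000_longcall_ref below) rather than as a direct 'bl'.  */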
17673
17674/* Set longcall attributes on all functions declared when
17675   rs6000_default_long_calls is true.  */
17676static void
17677rs6000_set_default_type_attributes (tree type)
17678{
17679  if (rs6000_default_long_calls
17680      && (TREE_CODE (type) == FUNCTION_TYPE
17681	  || TREE_CODE (type) == METHOD_TYPE))
17682    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17683					NULL_TREE,
17684					TYPE_ATTRIBUTES (type));
17685
17686#if TARGET_MACHO
17687  darwin_set_default_type_attributes (type);
17688#endif
17689}
17690
17691/* Return a reference suitable for calling a function with the
17692   longcall attribute.  */
17693
17694rtx
17695rs6000_longcall_ref (rtx call_ref)
17696{
17697  const char *call_name;
17698  tree node;
17699
17700  if (GET_CODE (call_ref) != SYMBOL_REF)
17701    return call_ref;
17702
17703  /* System V adds '.' to the internal name, so skip the leading dots.  */
17704  call_name = XSTR (call_ref, 0);
17705  if (*call_name == '.')
17706    {
17707      while (*call_name == '.')
17708	call_name++;
17709
17710      node = get_identifier (call_name);
17711      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
17712    }
17713
17714  return force_reg (Pmode, call_ref);
17715}
17716
17717#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
17718#define TARGET_USE_MS_BITFIELD_LAYOUT 0
17719#endif
17720
17721/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
17722   struct attribute_spec.handler.  */
17723static tree
17724rs6000_handle_struct_attribute (tree *node, tree name,
17725				tree args ATTRIBUTE_UNUSED,
17726				int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
17727{
17728  tree *type = NULL;
17729  if (DECL_P (*node))
17730    {
17731      if (TREE_CODE (*node) == TYPE_DECL)
17732        type = &TREE_TYPE (*node);
17733    }
17734  else
17735    type = node;
17736
17737  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
17738                 || TREE_CODE (*type) == UNION_TYPE)))
17739    {
17740      warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
17741      *no_add_attrs = true;
17742    }
17743
17744  else if ((is_attribute_p ("ms_struct", name)
17745            && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
17746           || ((is_attribute_p ("gcc_struct", name)
17747                && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
17748    {
17749      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
17750               IDENTIFIER_POINTER (name));
17751      *no_add_attrs = true;
17752    }
17753
17754  return NULL_TREE;
17755}
17756
17757static bool
17758rs6000_ms_bitfield_layout_p (tree record_type)
17759{
17760  return (TARGET_USE_MS_BITFIELD_LAYOUT &&
17761          !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
17762    || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
17763}
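
/* Editorial illustration, not from the original source:

	struct __attribute__ ((ms_struct)) S { char c; int i : 8; };

   selects the Microsoft record layout for S even when
   TARGET_USE_MS_BITFIELD_LAYOUT is 0, while gcc_struct forces the
   native layout back on when that macro is nonzero.  */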
17764
17765#ifdef USING_ELFOS_H
17766
17767/* A get_unnamed_section callback, used for switching to toc_section.  */
17768
17769static void
17770rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
17771{
17772  if (DEFAULT_ABI == ABI_AIX
17773      && TARGET_MINIMAL_TOC
17774      && !TARGET_RELOCATABLE)
17775    {
17776      if (!toc_initialized)
17777	{
17778	  toc_initialized = 1;
17779	  fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
17780	  (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
17781	  fprintf (asm_out_file, "\t.tc ");
17782	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
17783	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17784	  fprintf (asm_out_file, "\n");
17785
17786	  fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17787	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17788	  fprintf (asm_out_file, " = .+32768\n");
17789	}
17790      else
17791	fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17792    }
17793  else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
17794    fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
17795  else
17796    {
17797      fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17798      if (!toc_initialized)
17799	{
17800	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17801	  fprintf (asm_out_file, " = .+32768\n");
17802	  toc_initialized = 1;
17803	}
17804    }
17805}
17806
17807/* Implement TARGET_ASM_INIT_SECTIONS.  */
17808
17809static void
17810rs6000_elf_asm_init_sections (void)
17811{
17812  toc_section
17813    = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
17814
17815  sdata2_section
17816    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
17817			   SDATA2_SECTION_ASM_OP);
17818}
17819
17820/* Implement TARGET_SELECT_RTX_SECTION.  */
17821
17822static section *
17823rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
17824			       unsigned HOST_WIDE_INT align)
17825{
17826  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
17827    return toc_section;
17828  else
17829    return default_elf_select_rtx_section (mode, x, align);
17830}
17831
17832/* For a SYMBOL_REF, set generic flags and then perform some
17833   target-specific processing.
17834
17835   When the AIX ABI is requested on a non-AIX system, replace the
17836   function name with the real name (with a leading .) rather than the
17837   function descriptor name.  This saves a lot of overriding code to
17838   read the prefixes.  */
17839
17840static void
17841rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
17842{
17843  default_encode_section_info (decl, rtl, first);
17844
17845  if (first
17846      && TREE_CODE (decl) == FUNCTION_DECL
17847      && !TARGET_AIX
17848      && DEFAULT_ABI == ABI_AIX)
17849    {
17850      rtx sym_ref = XEXP (rtl, 0);
17851      size_t len = strlen (XSTR (sym_ref, 0));
17852      char *str = alloca (len + 2);
17853      str[0] = '.';
17854      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17855      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
17856    }
17857}
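
/* Editorial illustration: when compiling for ABI_AIX on an ELF
   system, a function 'foo' has the string of its SYMBOL_REF rewritten
   from "foo" to ".foo" (the code entry point), so consumers of the
   symbol need not strip function-descriptor prefixes themselves.  */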
17858
17859bool
17860rs6000_elf_in_small_data_p (tree decl)
17861{
17862  if (rs6000_sdata == SDATA_NONE)
17863    return false;
17864
17865  /* We want to merge strings, so we never consider them small data.  */
17866  if (TREE_CODE (decl) == STRING_CST)
17867    return false;
17868
17869  /* Functions are never in the small data area.  */
17870  if (TREE_CODE (decl) == FUNCTION_DECL)
17871    return false;
17872
17873  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17874    {
17875      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17876      if (strcmp (section, ".sdata") == 0
17877	  || strcmp (section, ".sdata2") == 0
17878	  || strcmp (section, ".sbss") == 0
17879	  || strcmp (section, ".sbss2") == 0
17880	  || strcmp (section, ".PPC.EMB.sdata0") == 0
17881	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
17882	return true;
17883    }
17884  else
17885    {
17886      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17887
17888      if (size > 0
17889	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
17890	  /* If it's not public, and we're not going to reference it through
17891	     the small data area anyway, there's no need to put it there.  */
17892	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17893	return true;
17894    }
17895
17896  return false;
17897}
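
/* Editorial example, not part of GCC: with -msdata and, say, -G 8
   (g_switch_value == 8), a file-scope 'int counter;' (4 bytes, no
   explicit section) satisfies the size test above and is placed in
   the small data area, whereas a 16-byte 'int big[4]' is not.  */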
17898
17899#endif /* USING_ELFOS_H */
17900
17901/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  */
17902
17903static bool
17904rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
17905{
17906  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
17907}
17908
17909/* Return a REG that occurs in ADDR with coefficient 1.
17910   ADDR can be effectively incremented by incrementing REG.
17911
17912   r0 is special and we must not select it as an address
17913   register in this routine, since our caller will try to
17914   increment the returned register via an "la" instruction.  */
17915
17916rtx
17917find_addr_reg (rtx addr)
17918{
17919  while (GET_CODE (addr) == PLUS)
17920    {
17921      if (GET_CODE (XEXP (addr, 0)) == REG
17922	  && REGNO (XEXP (addr, 0)) != 0)
17923	addr = XEXP (addr, 0);
17924      else if (GET_CODE (XEXP (addr, 1)) == REG
17925	       && REGNO (XEXP (addr, 1)) != 0)
17926	addr = XEXP (addr, 1);
17927      else if (CONSTANT_P (XEXP (addr, 0)))
17928	addr = XEXP (addr, 1);
17929      else if (CONSTANT_P (XEXP (addr, 1)))
17930	addr = XEXP (addr, 0);
17931      else
17932	gcc_unreachable ();
17933    }
17934  gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17935  return addr;
17936}
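
/* Editorial illustration: for ADDR == (plus (reg 9) (const_int 8))
   the loop above returns (reg 9); given the nested form
   (plus (plus (reg 9) (reg 10)) (const_int 4)) it first steps past
   the constant and then returns (reg 9), the first usable non-r0
   register operand.  */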
17937
17938void
17939rs6000_fatal_bad_address (rtx op)
17940{
17941  fatal_insn ("bad address", op);
17942}
17943
17944#if TARGET_MACHO
17945
17946static tree branch_island_list = 0;
17947
17948/* Remember to generate a branch island for far calls to the given
17949   function.  */
17950
17951static void
17952add_compiler_branch_island (tree label_name, tree function_name,
17953			    int line_number)
17954{
17955  tree branch_island = build_tree_list (function_name, label_name);
17956  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
17957  TREE_CHAIN (branch_island) = branch_island_list;
17958  branch_island_list = branch_island;
17959}
17960
17961#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
17962#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
17963#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
17964		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17965
17966/* Generate far-jump branch islands for everything on the
17967   branch_island_list.  Invoked immediately after the last instruction
17968   of the epilogue has been emitted; the branch-islands must be
17969   appended to, and contiguous with, the function body.  Mach-O stubs
17970   are generated in machopic_output_stub().  */
17971
17972static void
17973macho_branch_islands (void)
17974{
17975  char tmp_buf[512];
17976  tree branch_island;
17977
17978  for (branch_island = branch_island_list;
17979       branch_island;
17980       branch_island = TREE_CHAIN (branch_island))
17981    {
17982      const char *label =
17983	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17984      const char *name  =
17985	IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
17986      char name_buf[512];
17987      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
17988      if (name[0] == '*' || name[0] == '&')
17989	strcpy (name_buf, name+1);
17990      else
17991	{
17992	  name_buf[0] = '_';
17993	  strcpy (name_buf+1, name);
17994	}
17995      strcpy (tmp_buf, "\n");
17996      strcat (tmp_buf, label);
17997#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17998      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
17999	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
18000#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
18001      if (flag_pic)
18002	{
18003	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
18004	  strcat (tmp_buf, label);
18005	  strcat (tmp_buf, "_pic\n");
18006	  strcat (tmp_buf, label);
18007	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");
18008
18009	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
18010	  strcat (tmp_buf, name_buf);
18011	  strcat (tmp_buf, " - ");
18012	  strcat (tmp_buf, label);
18013	  strcat (tmp_buf, "_pic)\n");
18014
18015	  strcat (tmp_buf, "\tmtlr r0\n");
18016
18017	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
18018	  strcat (tmp_buf, name_buf);
18019	  strcat (tmp_buf, " - ");
18020	  strcat (tmp_buf, label);
18021	  strcat (tmp_buf, "_pic)\n");
18022
18023	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
18024	}
18025      else
18026	{
18027	  strcat (tmp_buf, ":\nlis r12,hi16(");
18028	  strcat (tmp_buf, name_buf);
18029	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
18030	  strcat (tmp_buf, name_buf);
18031	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
18032	}
18033      output_asm_insn (tmp_buf, 0);
18034#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
18035      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
18036	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
18037#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
18038    }
18039
18040  branch_island_list = 0;
18041}
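
/* Editorial sketch, not part of GCC: reconstructed from the strcat
   sequence above, the non-PIC island for a far call to _far_away
   (hypothetical label and callee, whitespace normalized) is

	L42$island:
		lis r12,hi16(_far_away)
		ori r12,r12,lo16(_far_away)
		mtctr r12
		bctr

   The flag_pic variant instead materializes the address relative to
   the island itself via the mflr/bcl/mflr sequence.  */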
18042
18043/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
18044   already there or not.  */
18045
18046static int
18047no_previous_def (tree function_name)
18048{
18049  tree branch_island;
18050  for (branch_island = branch_island_list;
18051       branch_island;
18052       branch_island = TREE_CHAIN (branch_island))
18053    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18054      return 0;
18055  return 1;
18056}
18057
18058/* GET_PREV_LABEL gets the label name from the previous definition of
18059   the function.  */
18060
18061static tree
18062get_prev_label (tree function_name)
18063{
18064  tree branch_island;
18065  for (branch_island = branch_island_list;
18066       branch_island;
18067       branch_island = TREE_CHAIN (branch_island))
18068    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18069      return BRANCH_ISLAND_LABEL_NAME (branch_island);
18070  return 0;
18071}
18072
18073#ifndef DARWIN_LINKER_GENERATES_ISLANDS
18074#define DARWIN_LINKER_GENERATES_ISLANDS 0
18075#endif
18076
18077/* KEXTs still need branch islands.  */
18078#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
18079				 || flag_mkernel || flag_apple_kext)
18080
18081/* Output the assembly for a function call INSN.  The routine being
18082   called is OPERANDS[DEST_OPERAND_NUMBER].
18083
18084   OPERANDS[COOKIE_OPERAND_NUMBER] holds the call cookie (e.g. CALL_LONG).  */
18085
18086char *
18087output_call (rtx insn, rtx *operands, int dest_operand_number,
18088	     int cookie_operand_number)
18089{
18090  static char buf[256];
18091  if (DARWIN_GENERATE_ISLANDS
18092      && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
18093      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
18094    {
18095      tree labelname;
18096      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
18097
18098      if (no_previous_def (funname))
18099	{
18100	  int line_number = 0;
18101	  rtx label_rtx = gen_label_rtx ();
18102	  char *label_buf, temp_buf[256];
18103	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
18104				       CODE_LABEL_NUMBER (label_rtx));
18105	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
18106	  labelname = get_identifier (label_buf);
18107	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
18108	  if (insn)
18109	    line_number = NOTE_LINE_NUMBER (insn);
18110	  add_compiler_branch_island (labelname, funname, line_number);
18111	}
18112      else
18113	labelname = get_prev_label (funname);
18114
18115      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
18116	 instruction will reach 'foo', otherwise link as 'bl L42'".
18117	 "L42" should be a 'branch island', that will do a far jump to
18118	 'foo'.  Branch islands are generated in
18119	 macho_branch_islands().  */
18120      sprintf (buf, "jbsr %%z%d,%.246s",
18121	       dest_operand_number, IDENTIFIER_POINTER (labelname));
18122    }
18123  else
18124    sprintf (buf, "bl %%z%d", dest_operand_number);
18125  return buf;
18126}
18127
18128/* Generate PIC and indirect symbol stubs.  */
18129
18130void
18131machopic_output_stub (FILE *file, const char *symb, const char *stub)
18132{
18133  unsigned int length;
18134  char *symbol_name, *lazy_ptr_name;
18135  char *local_label_0;
18136  static int label = 0;
18137
18138  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
18139  symb = (*targetm.strip_name_encoding) (symb);
18140
18141
18142  length = strlen (symb);
18143  symbol_name = alloca (length + 32);
18144  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
18145
18146  lazy_ptr_name = alloca (length + 32);
18147  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
18148
18149  if (flag_pic == 2)
18150    switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
18151  else
18152    switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
18153
18154  if (flag_pic == 2)
18155    {
18156      fprintf (file, "\t.align 5\n");
18157
18158      fprintf (file, "%s:\n", stub);
18159      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
18160
18161      label++;
18162      local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
18163      sprintf (local_label_0, "\"L%011d$spb\"", label);
18164
18165      fprintf (file, "\tmflr r0\n");
18166      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
18167      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
18168      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
18169	       lazy_ptr_name, local_label_0);
18170      fprintf (file, "\tmtlr r0\n");
18171      fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
18172	       (TARGET_64BIT ? "ldu" : "lwzu"),
18173	       lazy_ptr_name, local_label_0);
18174      fprintf (file, "\tmtctr r12\n");
18175      fprintf (file, "\tbctr\n");
18176    }
18177  else
18178    {
18179      fprintf (file, "\t.align 4\n");
18180
18181      fprintf (file, "%s:\n", stub);
18182      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
18183
18184      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
18185      fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
18186	       (TARGET_64BIT ? "ldu" : "lwzu"),
18187	       lazy_ptr_name);
18188      fprintf (file, "\tmtctr r12\n");
18189      fprintf (file, "\tbctr\n");
18190    }
18191
18192  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
18193  fprintf (file, "%s:\n", lazy_ptr_name);
18194  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
18195  fprintf (file, "%sdyld_stub_binding_helper\n",
18196	   (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
18197}
18198
18199/* Legitimize PIC addresses.  If the address is already
18200   position-independent, we return ORIG.  Newly generated
18201   position-independent addresses go into a reg.  This is REG if
18202   nonzero; otherwise we allocate register(s) as necessary.  */
18203
18204#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
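
/* Editorial note: the unsigned-wrap idiom above tests
   -32768 <= INTVAL (X) <= 32767 without signed comparisons; e.g.
   INTVAL == 32767 gives 0xffff < 0x10000 (accepted), while
   INTVAL == 32768 gives exactly 0x10000 (rejected).  */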
18205
18206rtx
18207rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
18208					rtx reg)
18209{
18210  rtx base, offset;
18211
18212  if (reg == NULL && ! reload_in_progress && ! reload_completed)
18213    reg = gen_reg_rtx (Pmode);
18214
18215  if (GET_CODE (orig) == CONST)
18216    {
18217      rtx reg_temp;
18218
18219      if (GET_CODE (XEXP (orig, 0)) == PLUS
18220	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
18221	return orig;
18222
18223      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
18224
18225      /* Use a different reg for the intermediate value, as
18226	 it will be marked UNCHANGING.  */
18227      reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
18228      base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
18229						     Pmode, reg_temp);
18230      offset =
18231	rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
18232						Pmode, reg);
18233
18234      if (GET_CODE (offset) == CONST_INT)
18235	{
18236	  if (SMALL_INT (offset))
18237	    return plus_constant (base, INTVAL (offset));
18238	  else if (! reload_in_progress && ! reload_completed)
18239	    offset = force_reg (Pmode, offset);
18240	  else
18241	    {
18242 	      rtx mem = force_const_mem (Pmode, orig);
18243	      return machopic_legitimize_pic_address (mem, Pmode, reg);
18244	    }
18245	}
18246      return gen_rtx_PLUS (Pmode, base, offset);
18247    }
18248
18249  /* Fall back on generic machopic code.  */
18250  return machopic_legitimize_pic_address (orig, mode, reg);
18251}
18252
18253/* Output a .machine directive for the Darwin assembler, and call
18254   the generic start_file routine.  */
18255
18256static void
18257rs6000_darwin_file_start (void)
18258{
18259  static const struct
18260  {
18261    const char *arg;
18262    const char *name;
18263    int if_set;
18264  } mapping[] = {
18265    { "ppc64", "ppc64", MASK_64BIT },
18266    { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
18267    { "power4", "ppc970", 0 },
18268    { "G5", "ppc970", 0 },
18269    { "7450", "ppc7450", 0 },
18270    { "7400", "ppc7400", MASK_ALTIVEC },
18271    { "G4", "ppc7400", 0 },
18272    { "750", "ppc750", 0 },
18273    { "740", "ppc750", 0 },
18274    { "G3", "ppc750", 0 },
18275    { "604e", "ppc604e", 0 },
18276    { "604", "ppc604", 0 },
18277    { "603e", "ppc603", 0 },
18278    { "603", "ppc603", 0 },
18279    { "601", "ppc601", 0 },
18280    { NULL, "ppc", 0 } };
18281  const char *cpu_id = "";
18282  size_t i;
18283
18284  rs6000_file_start ();
18285  darwin_file_start ();
18286
18287  /* Determine the argument to -mcpu=.  Default to G3 if not specified.  */
18288  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
18289    if (rs6000_select[i].set_arch_p && rs6000_select[i].string
18290	&& rs6000_select[i].string[0] != '\0')
18291      cpu_id = rs6000_select[i].string;
18292
18293  /* Look through the mapping array.  Pick the first name that either
18294     matches the argument, has a bit set in IF_SET that is also set
18295     in the target flags, or has a NULL name.  */
18296
18297  i = 0;
18298  while (mapping[i].arg != NULL
18299	 && strcmp (mapping[i].arg, cpu_id) != 0
18300	 && (mapping[i].if_set & target_flags) == 0)
18301    i++;
18302
18303  fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
18304}
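
/* Editorial illustration of the mapping above: '-mcpu=G5' matches the
   "G5" row and emits "\t.machine ppc970"; a plain 32-bit, non-AltiVec
   configuration with no -mcpu falls through to the NULL sentinel and
   emits "\t.machine ppc".  */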
18305
18306#endif /* TARGET_MACHO */
18307
18308#if TARGET_ELF
18309static int
18310rs6000_elf_reloc_rw_mask (void)
18311{
18312  if (flag_pic)
18313    return 3;
18314  else if (DEFAULT_ABI == ABI_AIX)
18315    return 2;
18316  else
18317    return 0;
18318}
18319
18320/* Record an element in the table of global constructors.  SYMBOL is
18321   a SYMBOL_REF of the function to be called; PRIORITY is a number
18322   between 0 and MAX_INIT_PRIORITY.
18323
18324   This differs from default_named_section_asm_out_constructor in
18325   that we have special handling for -mrelocatable.  */
18326
18327static void
18328rs6000_elf_asm_out_constructor (rtx symbol, int priority)
18329{
18330  const char *section = ".ctors";
18331  char buf[16];
18332
18333  if (priority != DEFAULT_INIT_PRIORITY)
18334    {
18335      sprintf (buf, ".ctors.%.5u",
18336	       /* Invert the numbering so the linker puts us in the proper
18337		  order; constructors are run from right to left, and the
18338		  linker sorts in increasing order.  */
18339	       MAX_INIT_PRIORITY - priority);
18340      section = buf;
18341    }
18342
18343  switch_to_section (get_section (section, SECTION_WRITE, NULL));
18344  assemble_align (POINTER_SIZE);
18345
18346  if (TARGET_RELOCATABLE)
18347    {
18348      fputs ("\t.long (", asm_out_file);
18349      output_addr_const (asm_out_file, symbol);
18350      fputs (")@fixup\n", asm_out_file);
18351    }
18352  else
18353    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
18354}
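
/* Editorial worked example, not part of GCC: with MAX_INIT_PRIORITY
   == 65535, a constructor registered with priority 101 lands in
   section ".ctors.65434" (65535 - 101, printed with "%.5u").  */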
18355
18356static void
18357rs6000_elf_asm_out_destructor (rtx symbol, int priority)
18358{
18359  const char *section = ".dtors";
18360  char buf[16];
18361
18362  if (priority != DEFAULT_INIT_PRIORITY)
18363    {
18364      sprintf (buf, ".dtors.%.5u",
18365	       /* Invert the numbering so the linker puts us in the proper
18366		  order; constructors are run from right to left, and the
18367		  linker sorts in increasing order.  */
18368	       MAX_INIT_PRIORITY - priority);
18369      section = buf;
18370    }
18371
18372  switch_to_section (get_section (section, SECTION_WRITE, NULL));
18373  assemble_align (POINTER_SIZE);
18374
18375  if (TARGET_RELOCATABLE)
18376    {
18377      fputs ("\t.long (", asm_out_file);
18378      output_addr_const (asm_out_file, symbol);
18379      fputs (")@fixup\n", asm_out_file);
18380    }
18381  else
18382    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
18383}
18384
18385void
18386rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
18387{
18388  if (TARGET_64BIT)
18389    {
18390      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
18391      ASM_OUTPUT_LABEL (file, name);
18392      fputs (DOUBLE_INT_ASM_OP, file);
18393      rs6000_output_function_entry (file, name);
18394      fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
18395      if (DOT_SYMBOLS)
18396	{
18397	  fputs ("\t.size\t", file);
18398	  assemble_name (file, name);
18399	  fputs (",24\n\t.type\t.", file);
18400	  assemble_name (file, name);
18401	  fputs (",@function\n", file);
18402	  if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
18403	    {
18404	      fputs ("\t.globl\t.", file);
18405	      assemble_name (file, name);
18406	      putc ('\n', file);
18407	    }
18408	}
18409      else
18410	ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
18411      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
18412      rs6000_output_function_entry (file, name);
18413      fputs (":\n", file);
18414      return;
18415    }
18416
18417  if (TARGET_RELOCATABLE
18418      && !TARGET_SECURE_PLT
18419      && (get_pool_size () != 0 || current_function_profile)
18420      && uses_TOC ())
18421    {
18422      char buf[256];
18423
18424      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
18425
18426      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
18427      fprintf (file, "\t.long ");
18428      assemble_name (file, buf);
18429      putc ('-', file);
18430      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
18431      assemble_name (file, buf);
18432      putc ('\n', file);
18433    }
18434
18435  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
18436  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
18437
18438  if (DEFAULT_ABI == ABI_AIX)
18439    {
18440      const char *desc_name, *orig_name;
18441
18442      orig_name = (*targetm.strip_name_encoding) (name);
18443      desc_name = orig_name;
18444      while (*desc_name == '.')
18445	desc_name++;
18446
18447      if (TREE_PUBLIC (decl))
18448	fprintf (file, "\t.globl %s\n", desc_name);
18449
18450      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18451      fprintf (file, "%s:\n", desc_name);
18452      fprintf (file, "\t.long %s\n", orig_name);
18453      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18454      if (DEFAULT_ABI == ABI_AIX)
18455	fputs ("\t.long 0\n", file);
18456      fprintf (file, "\t.previous\n");
18457    }
18458  ASM_OUTPUT_LABEL (file, name);
18459}
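
/* Editorial sketch of the 64-bit branch above, assuming an ELF target
   where DOUBLE_INT_ASM_OP is "\t.quad\t" and !DOT_SYMBOLS, so that
   rs6000_output_function_entry spells the entry point ".L.foo":

	.section ".opd","aw"
	.align 3
   foo:
	.quad .L.foo,.TOC.@tocbase,0
	.previous
	.type foo,@function
   .L.foo:

   i.e. 'foo' names the function descriptor and '.L.foo' the code.  */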
18460
18461static void
18462rs6000_elf_end_indicate_exec_stack (void)
18463{
18464  if (NEED_INDICATE_EXEC_STACK)
18465    file_end_indicate_exec_stack ();
18466}
18467#endif
18468
18469#if TARGET_XCOFF
18470static void
18471rs6000_xcoff_asm_output_anchor (rtx symbol)
18472{
18473  char buffer[100];
18474
18475  sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
18476	   SYMBOL_REF_BLOCK_OFFSET (symbol));
18477  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
18478}
18479
18480static void
18481rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
18482{
18483  fputs (GLOBAL_ASM_OP, stream);
18484  RS6000_OUTPUT_BASENAME (stream, name);
18485  putc ('\n', stream);
18486}
18487
18488/* A get_unnamed_section callback, used for read-only sections.
18489   DIRECTIVE points to the section string variable.  */
18490
18491static void
18492rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
18493{
18494  fprintf (asm_out_file, "\t.csect %s[RO],3\n",
18495	   *(const char *const *) directive);
18496}
18497
18498/* Likewise for read-write sections.  */
18499
18500static void
18501rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
18502{
18503  fprintf (asm_out_file, "\t.csect %s[RW],3\n",
18504	   *(const char *const *) directive);
18505}
18506
18507/* A get_unnamed_section callback, used for switching to toc_section.  */
18508
18509static void
18510rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
18511{
18512  if (TARGET_MINIMAL_TOC)
18513    {
18514      /* toc_section is always selected at least once from
18515	 rs6000_xcoff_file_start, so this is guaranteed to
18516	 always be defined once and only once in each file.  */
18517      if (!toc_initialized)
18518	{
18519	  fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
18520	  fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
18521	  toc_initialized = 1;
18522	}
18523      fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
18524	       (TARGET_32BIT ? "" : ",3"));
18525    }
18526  else
18527    fputs ("\t.toc\n", asm_out_file);
18528}
18529
18530/* Implement TARGET_ASM_INIT_SECTIONS.  */
18531
18532static void
18533rs6000_xcoff_asm_init_sections (void)
18534{
18535  read_only_data_section
18536    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
18537			   &xcoff_read_only_section_name);
18538
18539  private_data_section
18540    = get_unnamed_section (SECTION_WRITE,
18541			   rs6000_xcoff_output_readwrite_section_asm_op,
18542			   &xcoff_private_data_section_name);
18543
18544  read_only_private_data_section
18545    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
18546			   &xcoff_private_data_section_name);
18547
18548  toc_section
18549    = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
18550
18551  readonly_data_section = read_only_data_section;
18552  exception_section = data_section;
18553}
18554
18555static int
18556rs6000_xcoff_reloc_rw_mask (void)
18557{
18558  return 3;
18559}
18560
18561static void
18562rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18563				tree decl ATTRIBUTE_UNUSED)
18564{
18565  int smclass;
18566  static const char * const suffix[3] = { "PR", "RO", "RW" };
18567
18568  if (flags & SECTION_CODE)
18569    smclass = 0;
18570  else if (flags & SECTION_WRITE)
18571    smclass = 2;
18572  else
18573    smclass = 1;
18574
18575  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
18576	   (flags & SECTION_CODE) ? "." : "",
18577	   name, suffix[smclass], flags & SECTION_ENTSIZE);
18578}
18579
18580static section *
18581rs6000_xcoff_select_section (tree decl, int reloc,
18582			     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18583{
18584  if (decl_readonly_section (decl, reloc))
18585    {
18586      if (TREE_PUBLIC (decl))
18587	return read_only_data_section;
18588      else
18589	return read_only_private_data_section;
18590    }
18591  else
18592    {
18593      if (TREE_PUBLIC (decl))
18594	return data_section;
18595      else
18596	return private_data_section;
18597    }
18598}
18599
18600static void
18601rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
18602{
18603  const char *name;
18604
18605  /* Use select_section for private and uninitialized data.  */
18606  if (!TREE_PUBLIC (decl)
18607      || DECL_COMMON (decl)
18608      || DECL_INITIAL (decl) == NULL_TREE
18609      || DECL_INITIAL (decl) == error_mark_node
18610      || (flag_zero_initialized_in_bss
18611	  && initializer_zerop (DECL_INITIAL (decl))))
18612    return;
18613
18614  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18615  name = (*targetm.strip_name_encoding) (name);
18616  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
18617}
18618
18619/* Select section for constant in constant pool.
18620
18621   On RS/6000, all constants are in the private read-only data area.
18622   However, if this is being placed in the TOC it must be output as a
18623   toc entry.  */
18624
18625static section *
18626rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
18627				 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
18628{
18629  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18630    return toc_section;
18631  else
18632    return read_only_private_data_section;
18633}
18634
18635/* Remove any trailing [DS] or the like from the symbol name.  */
18636
18637static const char *
18638rs6000_xcoff_strip_name_encoding (const char *name)
18639{
18640  size_t len;
18641  if (*name == '*')
18642    name++;
18643  len = strlen (name);
18644  if (name[len - 1] == ']')
18645    return ggc_alloc_string (name, len - 4);
18646  else
18647    return name;
18648}
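
/* Editorial examples: "foo[DS]" and "*foo[DS]" both strip to "foo".
   Note the test above keys only on a trailing ']' and removes exactly
   four characters, which assumes a two-letter storage class such as
   [DS], [RO], [RW] or [PR].  */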
18649
18650/* Section attributes.  AIX is always PIC.  */
18651
18652static unsigned int
18653rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
18654{
18655  unsigned int align;
18656  unsigned int flags = default_section_type_flags (decl, name, reloc);
18657
18658  /* Align to at least UNIT size.  */
18659  if (flags & SECTION_CODE)
18660    align = MIN_UNITS_PER_WORD;
18661  else
18662    /* Increase alignment of large objects if not already stricter.  */
18663    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18664		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18665		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18666
18667  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
18668}
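
/* Editorial worked example, not part of GCC: a 16-byte-aligned decl
   yields align == 16, so exact_log2 (16) == 4 is folded into the
   SECTION_ENTSIZE bits; rs6000_xcoff_asm_named_section above then
   prints that 4 as the alignment operand of the ".csect" directive.  */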
18669
18670/* Output at beginning of assembler file.
18671
18672   Initialize the section names for the RS/6000 at this point.
18673
18674   Specify filename, including full path, to assembler.
18675
18676   We want to go into the TOC section so at least one .toc will be emitted.
18677   Also, in order to output proper .bs/.es pairs, we need at least one static
18678   [RW] section emitted.
18679
18680   Finally, declare mcount when profiling to make the assembler happy.  */
18681
18682static void
18683rs6000_xcoff_file_start (void)
18684{
18685  rs6000_gen_section_name (&xcoff_bss_section_name,
18686			   main_input_filename, ".bss_");
18687  rs6000_gen_section_name (&xcoff_private_data_section_name,
18688			   main_input_filename, ".rw_");
18689  rs6000_gen_section_name (&xcoff_read_only_section_name,
18690			   main_input_filename, ".ro_");
18691
18692  fputs ("\t.file\t", asm_out_file);
18693  output_quoted_string (asm_out_file, main_input_filename);
18694  fputc ('\n', asm_out_file);
18695  if (write_symbols != NO_DEBUG)
18696    switch_to_section (private_data_section);
18697  switch_to_section (text_section);
18698  if (profile_flag)
18699    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18700  rs6000_file_start ();
18701}
18702
18703/* Output at end of assembler file.
18704   On the RS/6000, referencing data should automatically pull in text.  */
18705
18706static void
18707rs6000_xcoff_file_end (void)
18708{
18709  switch_to_section (text_section);
18710  fputs ("_section_.text:\n", asm_out_file);
18711  switch_to_section (data_section);
18712  fputs (TARGET_32BIT
18713	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18714	 asm_out_file);
18715}
18716#endif /* TARGET_XCOFF */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.  */
    case CONST_INT:
      if (((outer_code == SET
	    || outer_code == PLUS
	    || outer_code == MINUS)
	   && (satisfies_constraint_I (x)
	       || satisfies_constraint_L (x)))
	  || (outer_code == AND
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))
		  || mask_operand (x, mode)
		  || (mode == DImode
		      && mask64_operand (x, DImode))))
	  || ((outer_code == IOR || outer_code == XOR)
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || outer_code == ASHIFT
	  || outer_code == ASHIFTRT
	  || outer_code == LSHIFTRT
	  || outer_code == ROTATE
	  || outer_code == ROTATERT
	  || outer_code == ZERO_EXTRACT
	  || (outer_code == MULT
	      && satisfies_constraint_I (x))
	  || ((outer_code == DIV || outer_code == UDIV
	       || outer_code == MOD || outer_code == UMOD)
	      && exact_log2 (INTVAL (x)) >= 0)
	  || (outer_code == COMPARE
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)))
	  || (outer_code == EQ
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || (outer_code == GTU
	      && satisfies_constraint_I (x))
	  || (outer_code == LTU
	      && satisfies_constraint_P (x)))
	{
	  *total = 0;
	  return true;
	}
      else if ((outer_code == PLUS
		&& reg_or_add_cint_operand (x, VOIDmode))
	       || (outer_code == MINUS
		   && reg_or_sub_cint_operand (x, VOIDmode))
	       || ((outer_code == SET
		    || outer_code == IOR
		    || outer_code == XOR)
		   && (INTVAL (x)
		       & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (mode == DImode && code == CONST_DOUBLE)
	{
	  if ((outer_code == IOR || outer_code == XOR)
	      && CONST_DOUBLE_HIGH (x) == 0
	      && (CONST_DOUBLE_LOW (x)
		  & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
	    {
	      *total = 0;
	      return true;
	    }
	  else if ((outer_code == AND && and64_2_operand (x, DImode))
		   || ((outer_code == SET
			|| outer_code == IOR
			|| outer_code == XOR)
		       && CONST_DOUBLE_HIGH (x) == 0))
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	}
      /* FALLTHRU */

    case CONST:
    case HIGH:
    case SYMBOL_REF:
    case MEM:
      /* When optimizing for size, MEM should be slightly more expensive
	 than generating the address, e.g., (plus (reg) (const)).
	 L1 cache latency is about two instructions.  */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = rs6000_cost->dmul - rs6000_cost->fp;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = 0;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && satisfies_constraint_I (XEXP (x, 1)))
	{
	  if (INTVAL (XEXP (x, 1)) >= -256
	      && INTVAL (XEXP (x, 1)) <= 255)
	    *total = rs6000_cost->mulsi_const9;
	  else
	    *total = rs6000_cost->mulsi_const;
	}
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
	       && (outer_code == PLUS || outer_code == MINUS))
	*total = 0;
      else if (mode == DFmode)
	*total = rs6000_cost->dmul;
      else if (mode == SFmode)
	*total = rs6000_cost->fp;
      else if (mode == DImode)
	*total = rs6000_cost->muldi;
      else
	*total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
	{
	  *total = mode == DFmode ? rs6000_cost->ddiv
				  : rs6000_cost->sdiv;
	  return false;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  if (code == DIV || code == MOD)
	    /* Shift, addze */
	    *total = COSTS_N_INSNS (2);
	  else
	    /* Shift */
	    *total = COSTS_N_INSNS (1);
	}
      else
	{
	  if (GET_MODE (XEXP (x, 1)) == DImode)
	    *total = rs6000_cost->divdi;
	  else
	    *total = rs6000_cost->divsi;
	}
      /* Add in shift and subtract for MOD.  */
      if (code == MOD || code == UMOD)
	*total += COSTS_N_INSNS (2);
      return false;

    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case NOT:
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
	{
	  *total = 0;
	  return false;
	}
      /* FALLTHRU */

    case AND:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
	  && GET_CODE (XEXP (x, 0)) == MULT)
	{
	  if (mode == DImode)
	    *total = rs6000_cost->muldi;
	  else
	    *total = rs6000_cost->mulsi;
	  return true;
	}
      else if (outer_code == AND)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_CODE (XEXP (x, 0)) == MEM)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
	*total = 0;
      else
	*total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
	{
	case UNSPEC_FRSP:
	  *total = rs6000_cost->fp;
	  return true;

	default:
	  break;
	}
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      else if (FLOAT_MODE_P (mode)
	       && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
	{
	  *total = rs6000_cost->fp;
	  return false;
	}
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
	 NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
	  && (outer_code == NEG || outer_code == PLUS))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      if (outer_code == SET)
	{
	  if (XEXP (x, 1) == const0_rtx)
	    {
	      *total = COSTS_N_INSNS (2);
	      return true;
	    }
	  else if (mode == Pmode)
	    {
	      *total = COSTS_N_INSNS (3);
	      return false;
	    }
	}
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
	{
	  *total = 0;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
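
/* A worked example of the cost table above (an illustrative comment only):
   a signed division by a power of two, e.g. (div:SI (reg) (const_int 8)),
   is charged COSTS_N_INSNS (2) because it can be expanded as a shift
   followed by a carry fix-up rather than a full divw, roughly:

	srawi r3,r3,3	# arithmetic shift right by log2 (8)
	addze r3,r3	# round toward zero for negative dividends

   The unsigned case needs only the shift, hence COSTS_N_INSNS (1), and
   MOD/UMOD pay for an extra shift-back and subtract on top.  */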

/* A C expression returning the cost of moving data from a register of class
   CLASS1 to one of CLASS2.  */

int
rs6000_register_move_cost (enum machine_mode mode,
			   enum reg_class from, enum reg_class to)
{
  /* Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
	 shift.  */
      else if (from == CR_REGS)
	return 4;

      else
	/* A move will cost one instruction per GPR moved.  */
	return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
/* A C expression returning the cost of moving data of mode MODE from a
   register to or from memory.  */

int
rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
			 int in ATTRIBUTE_UNUSED)
{
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * hard_regno_nregs[0][mode];
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * hard_regno_nregs[32][mode];
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
  else
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}
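
/* For instance (an illustrative comment, not used by the code): with the
   costs above, a DFmode move between FLOAT_REGS and GENERAL_REGS is priced
   as a store plus a load, i.e.
   rs6000_memory_move_cost (DFmode, FLOAT_REGS, 0)
   + rs6000_memory_move_cost (DFmode, GENERAL_REGS, 0), since these
   processors have no direct FPR<->GPR copy instruction.  */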

/* Newton-Raphson approximation of single-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
			  gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
					  UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
			  gen_rtx_MINUS (SFmode, one,
					 gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
			  gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
			  gen_rtx_MINUS (SFmode, n,
					 gen_rtx_MULT (SFmode, d, u0))));
  /* res = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, v0, y1), u0)));
}
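
/* For reference, the emitted sequence computes the following scalar
   expression (a sketch only; this block is never compiled, and the plain
   division below stands in for the fres reciprocal-estimate insn):  */
#if 0
static float
swdivsf_model (float n, float d)
{
  float x0 = 1.0f / d;		/* hardware estimate of 1/d (fres)  */
  float e0 = 1.0f - d * x0;	/* relative error of the estimate  */
  float e1 = e0 + e0 * e0;	/* combined error term for one NR step  */
  float y1 = x0 + e1 * x0;	/* refined reciprocal  */
  float u0 = n * y1;		/* first quotient estimate  */
  float v0 = n - d * u0;	/* residual of that estimate  */
  return u0 + v0 * y1;		/* corrected quotient  */
}
#endif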

/* Newton-Raphson approximation of double-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;

  x0 = gen_reg_rtx (DFmode);
  e0 = gen_reg_rtx (DFmode);
  e1 = gen_reg_rtx (DFmode);
  e2 = gen_reg_rtx (DFmode);
  y1 = gen_reg_rtx (DFmode);
  y2 = gen_reg_rtx (DFmode);
  y3 = gen_reg_rtx (DFmode);
  u0 = gen_reg_rtx (DFmode);
  v0 = gen_reg_rtx (DFmode);
  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
			  gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
					  UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
			  gen_rtx_MINUS (DFmode, one,
					 gen_rtx_MULT (DFmode, d, x0))));
  /* y1 = x0 + e0 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e0, x0), x0)));
  /* e1 = e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
			  gen_rtx_MULT (DFmode, e0, e0)));
  /* y2 = y1 + e1 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, y2,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e1, y1), y1)));
  /* e2 = e1 * e1 */
  emit_insn (gen_rtx_SET (VOIDmode, e2,
			  gen_rtx_MULT (DFmode, e1, e1)));
  /* y3 = y2 + e2 * y2 */
  emit_insn (gen_rtx_SET (VOIDmode, y3,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e2, y2), y2)));
  /* u0 = n * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
			  gen_rtx_MULT (DFmode, n, y3)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
			  gen_rtx_MINUS (DFmode, n,
					 gen_rtx_MULT (DFmode, d, u0))));
  /* res = u0 + v0 * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, v0, y3), u0)));
}
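
/* Likewise, a never-compiled scalar sketch of the double-precision
   sequence.  Three refinement steps are used because each Newton-Raphson
   step roughly doubles the number of correct bits and the hardware
   estimate supplies only a few:  */
#if 0
static double
swdivdf_model (double n, double d)
{
  double x0 = 1.0 / d;		/* hardware estimate of 1/d  */
  double e0 = 1.0 - d * x0;	/* relative error of the estimate  */
  double y1 = x0 + e0 * x0;	/* first refinement  */
  double e1 = e0 * e0;		/* error after the first step  */
  double y2 = y1 + e1 * y1;	/* second refinement  */
  double e2 = e1 * e1;		/* error after the second step  */
  double y3 = y2 + e2 * y2;	/* third refinement  */
  double u0 = n * y3;		/* quotient estimate  */
  double v0 = n - d * u0;	/* residual  */
  return u0 + v0 * y3;		/* corrected quotient  */
}
#endif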

/* Return an RTX representing where to find the function value of a
   function returning MODE.  */
static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
	return gen_rtx_REG (mode, regno);
    }

  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
			  const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
			  GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
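
/* Example (comment only): for SCmode with hard float, the code above
   yields a PARALLEL of two SFmode pieces, the real part in FP_ARG_RETURN
   (fp1) at byte offset 0 and the imaginary part in the following register
   at offset 4.  */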

/* Define how to find the value returned by a function.
   VALTYPE is the data type of the value (as a tree).
   If the precise function being called is known, FUNC is its FUNCTION_DECL;
   otherwise, FUNC is 0.

   On the SPE, both FPs and vectors are returned in r3.

   On RS/6000 an integer value is in r3 and a floating-point value is in
   fp1, unless -msoft-float.  */

rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
	 an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
	return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* A long long return value must be split under the 32-bit ABI
	 when -mpowerpc64 is in effect.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      return gen_rtx_PARALLEL (DCmode,
	gen_rtvec (4,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 2),
				      GEN_INT (8)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 3),
				      GEN_INT (12))));
    }

  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
	   && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
	   && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* A long long return value must be split under the 32-bit ABI
	 when -mpowerpc64 is in effect.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_MODE_P (mode)
	   && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
	offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
	     ? info->fixed_size + info->vars_size + info->parm_size
	     : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}
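
/* Illustration (comment only): in a function that allocates no stack
   frame (info->push_p is zero), eliminating ARG_POINTER_REGNUM in favor
   of STACK_POINTER_REGNUM uses offset 0; in a frame-allocating function
   the incoming arguments sit info->total_size bytes above the new stack
   pointer.  */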

/* Return true if TYPE is a SPE or AltiVec opaque type.  */

static bool
rs6000_is_opaque_type (tree type)
{
  return (type == opaque_V2SI_type_node
	  || type == opaque_V2SF_type_node
	  || type == opaque_p_V2SI_type_node
	  || type == opaque_V4SI_type_node);
}

static rtx
rs6000_dwarf_register_span (rtx reg)
{
  unsigned regno;

  if (TARGET_SPE
      && (SPE_VECTOR_MODE (GET_MODE (reg))
	  || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* An SPE general-purpose register is 64 bits wide, but only its low
     32 bits carry the normal register number; number the high halves
     from 1200 so DWARF can tell the two halves of, say, r0 apart.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
		      BYTES_BIG_ENDIAN
		      ? gen_rtvec (2,
				   gen_rtx_REG (SImode, regno + 1200),
				   gen_rtx_REG (SImode, regno))
		      : gen_rtvec (2,
				   gen_rtx_REG (SImode, regno),
				   gen_rtx_REG (SImode, regno + 1200)));
}
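
/* For example (comment only): an E500 DFmode value living in r5 is
   described on a big-endian target as
   (parallel [(reg:SI 1205) (reg:SI 5)]), i.e. the high half first.  */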

/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}
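
/* E.g. (comment only): under the numbering above, condition register
   field cr2 (CR0_REGNO + 2) maps to DWARF register 88, the link register
   to 108, and AltiVec v0 (FIRST_ALTIVEC_REGNO) to 1124.  */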

/* Target hook for eh_return_filter_mode.  */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}

/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;
  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;
  else
    return false;
}

/* Target hook for invalid_arg_for_unprototyped_fn.  */
static const char *
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
{
  return (!rs6000_darwin64_abi
	  && typelist == 0
	  && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
	  && (funcdecl == NULL_TREE
	      || (TREE_CODE (funcdecl) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
	  ? N_("AltiVec argument passed to unprototyped function")
	  : NULL;
}

/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
   setup by calling the hidden function __stack_chk_fail_local instead
   of __stack_chk_fail directly.  Otherwise it is better to call
   __stack_chk_fail directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
	 ? default_hidden_stack_protect_fail ()
	 : default_external_stack_protect_fail ();
}

#include "gt-rs6000.h"