rs6000.c revision 96263
1/* Subroutines used for code generation on IBM RS/6000.
2   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING.  If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
28#include "real.h"
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "recog.h"
34#include "obstack.h"
35#include "tree.h"
36#include "expr.h"
37#include "optabs.h"
38#include "except.h"
39#include "function.h"
40#include "output.h"
41#include "basic-block.h"
42#include "integrate.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "hashtab.h"
46#include "tm_p.h"
47#include "target.h"
48#include "target-def.h"
49#include "langhooks.h"
50#include "reload.h"
51
52#ifndef TARGET_NO_PROTOTYPE
53#define TARGET_NO_PROTOTYPE 0
54#endif
55
56#define min(A,B)	((A) < (B) ? (A) : (B))
57#define max(A,B)	((A) > (B) ? (A) : (B))
58
59/* Target cpu type */
60
61enum processor_type rs6000_cpu;
/* Option strings that select the processor.  Slot 0 is filled in with
   the configure-time default; the others come from -mcpu=/-mtune=.
   rs6000_override_options scans the table in order, so a later entry
   overrides an earlier one.  */
62struct rs6000_cpu_select rs6000_select[3] =
63{
64  /* switch		name,			tune	arch */
65  { (const char *)0,	"--with-cpu=",		1,	1 },
66  { (const char *)0,	"-mcpu=",		1,	1 },
67  { (const char *)0,	"-mtune=",		1,	0 },
68};
69
70/* Size of long double: raw -mlong-double-N string and the parsed
   bit count (64 or 128, set in rs6000_override_options).  */
71const char *rs6000_long_double_size_string;
72int rs6000_long_double_type_size;
73
74/* Whether -mabi=altivec has appeared */
75int rs6000_altivec_abi;
76
77/* Set to non-zero once AIX common-mode calls have been defined.  */
78static int common_mode_defined;
79
80/* Save information from a "cmpxx" operation until the branch or scc is
81   emitted.  */
82rtx rs6000_compare_op0, rs6000_compare_op1;
83int rs6000_compare_fp_p;
84
85/* Label number of label created for -mrelocatable, to call to so we can
86   get the address of the GOT section */
87int rs6000_pic_labelno;
88
89#ifdef USING_ELFOS_H
90/* Which abi to adhere to */
91const char *rs6000_abi_name = RS6000_ABI_NAME;
92
93/* Semantics of the small data area */
94enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
95
96/* Which small data model to use */
97const char *rs6000_sdata_name = (char *)0;
98
99/* Counter for labels which are to be placed in .fixup.  */
100int fixuplabelno = 0;
101#endif
102
103/* ABI enumeration available for subtarget to use.  */
104enum rs6000_abi rs6000_current_abi;
105
106/* ABI string from -mabi= option.  */
107const char *rs6000_abi_string;
108
109/* Debug flags from -mdebug= (parsed in rs6000_override_options).  */
110const char *rs6000_debug_name;
111int rs6000_debug_stack;		/* debug stack applications */
112int rs6000_debug_arg;		/* debug argument handling */
113
114/* Flag to say the TOC is initialized */
115int toc_initialized;
116char toc_label_name[10];
117
118/* Alias set for saves and restores from the rs6000 stack.  */
119static int rs6000_sr_alias_set;
120
121static void rs6000_add_gc_roots PARAMS ((void));
122static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124static void validate_condition_mode
125  PARAMS ((enum rtx_code, enum machine_mode));
126static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127static void rs6000_maybe_dead PARAMS ((rtx));
128static void rs6000_emit_stack_tie PARAMS ((void));
129static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131static unsigned rs6000_hash_constant PARAMS ((rtx));
132static unsigned toc_hash_function PARAMS ((const void *));
133static int toc_hash_eq PARAMS ((const void *, const void *));
134static int toc_hash_mark_entry PARAMS ((void **, void *));
135static void toc_hash_mark_table PARAMS ((void *));
136static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137static void rs6000_free_machine_status PARAMS ((struct function *));
138static void rs6000_init_machine_status PARAMS ((struct function *));
139static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140static int rs6000_ra_ever_killed PARAMS ((void));
141static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142const struct attribute_spec rs6000_attribute_table[];
143static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146  HOST_WIDE_INT, HOST_WIDE_INT));
147#if TARGET_ELF
148static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
149							   int));
150static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
152#endif
153#ifdef OBJECT_FORMAT_COFF
154static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
155#endif
156static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157static int rs6000_adjust_priority PARAMS ((rtx, int));
158static int rs6000_issue_rate PARAMS ((void));
159
160static void rs6000_init_builtins PARAMS ((void));
161static void altivec_init_builtins PARAMS ((void));
162static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170static void rs6000_parse_abi_options PARAMS ((void));
171static int first_altivec_reg_to_save PARAMS ((void));
172static unsigned int compute_vrsave_mask PARAMS ((void));
173static void is_altivec_return_reg PARAMS ((rtx, void *));
174int vrsave_operation PARAMS ((rtx, enum machine_mode));
175static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177static int easy_vector_constant PARAMS ((rtx));
178
179/* Default register names.  Deliberately not const:
   rs6000_override_options copies alt_reg_names over this array when
   TARGET_REGNAMES is set.  Layout must match alt_reg_names below.  */
180char rs6000_reg_names[][8] =
181{
      /* GPRs 0-31.  */
182      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
183      "8",  "9", "10", "11", "12", "13", "14", "15",
184     "16", "17", "18", "19", "20", "21", "22", "23",
185     "24", "25", "26", "27", "28", "29", "30", "31",
      /* FPRs 0-31.  */
186      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
187      "8",  "9", "10", "11", "12", "13", "14", "15",
188     "16", "17", "18", "19", "20", "21", "22", "23",
189     "24", "25", "26", "27", "28", "29", "30", "31",
      /* Special registers and the argument pointer.  */
190     "mq", "lr", "ctr","ap",
      /* Condition register fields 0-7, then XER.  */
191      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
192      "xer",
193      /* AltiVec registers.  */
194      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
195      "8",  "9",  "10", "11", "12", "13", "14", "15",
196      "16", "17", "18", "19", "20", "21", "22", "23",
197      "24", "25", "26", "27", "28", "29", "30", "31",
198      "vrsave"
199};
200
201#ifdef TARGET_REGNAMES
/* Alternate ("%"-prefixed) register names.  When TARGET_REGNAMES is
   set, rs6000_override_options copies this table over
   rs6000_reg_names, so the two arrays must stay in exact one-to-one
   correspondence.  */
202static const char alt_reg_names[][8] =
203{
204   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
205   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
206  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
207  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
208   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
209   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
210  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
211  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
212    "mq",    "lr",  "ctr",   "ap",
213  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
214   "xer",
215   /* AltiVec registers.  */
216   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
217   "%v8",  "%v9",  "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
218   "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
219   "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
220   "vrsave"
221};
222#endif
223
224#ifndef MASK_STRICT_ALIGN
225#define MASK_STRICT_ALIGN 0
226#endif
227
228/* Initialize the GCC target structure.  */
229#undef TARGET_ATTRIBUTE_TABLE
230#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
231
232#undef TARGET_ASM_ALIGNED_DI_OP
233#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
234
235/* Default unaligned ops are only provided for ELF.  Find the ops needed
236   for non-ELF systems.  */
237#ifndef OBJECT_FORMAT_ELF
238#ifdef OBJECT_FORMAT_COFF
239/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
240   64-bit targets.  */
241#undef TARGET_ASM_UNALIGNED_HI_OP
242#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243#undef TARGET_ASM_UNALIGNED_SI_OP
244#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245#undef TARGET_ASM_UNALIGNED_DI_OP
246#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
247#else
248/* For Darwin.  */
249#undef TARGET_ASM_UNALIGNED_HI_OP
250#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251#undef TARGET_ASM_UNALIGNED_SI_OP
252#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
253#endif
254#endif
255
256/* This hook deals with fixups for relocatable code and DI-mode objects
257   in 64-bit code.  */
258#undef TARGET_ASM_INTEGER
259#define TARGET_ASM_INTEGER rs6000_assemble_integer
260
261#undef TARGET_ASM_FUNCTION_PROLOGUE
262#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263#undef TARGET_ASM_FUNCTION_EPILOGUE
264#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
265
266#if TARGET_ELF
267#undef TARGET_SECTION_TYPE_FLAGS
268#define TARGET_SECTION_TYPE_FLAGS  rs6000_elf_section_type_flags
269#endif
270
271#undef TARGET_SCHED_ISSUE_RATE
272#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273#undef TARGET_SCHED_ADJUST_COST
274#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275#undef TARGET_SCHED_ADJUST_PRIORITY
276#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
277
278#undef TARGET_INIT_BUILTINS
279#define TARGET_INIT_BUILTINS rs6000_init_builtins
280
281#undef TARGET_EXPAND_BUILTIN
282#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
283
284/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
285#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
286
/* The target hook vector; TARGET_INITIALIZER picks up the TARGET_xxx
   macro overrides defined above.  */
287struct gcc_target targetm = TARGET_INITIALIZER;
288
289/* Override command line options.  Mostly we process the processor
290   type and sometimes adjust other TARGET_ options.  */
291
292void
293rs6000_override_options (default_cpu)
294     const char *default_cpu;
295{
296  size_t i, j;
297  struct rs6000_cpu_select *ptr;
298
299  /* Simplify the entries below by making a mask for any POWER
300     variant and any PowerPC variant.  */
301
302#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
305#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
306
307  static struct ptt
308    {
309      const char *const name;		/* Canonical processor name.  */
310      const enum processor_type processor; /* Processor type enum value.  */
311      const int target_enable;	/* Target flags to enable.  */
312      const int target_disable;	/* Target flags to disable.  */
313    } const processor_target_table[]
314      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
315	    POWER_MASKS | POWERPC_MASKS},
316	 {"power", PROCESSOR_POWER,
317	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
318	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
319	 {"power2", PROCESSOR_POWER,
320	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
321	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
322	 {"power3", PROCESSOR_PPC630,
323	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
324	    POWER_MASKS | MASK_PPC_GPOPT},
325	 {"powerpc", PROCESSOR_POWERPC,
326	    MASK_POWERPC | MASK_NEW_MNEMONICS,
327	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
328	 {"powerpc64", PROCESSOR_POWERPC64,
329	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
330	    POWER_MASKS | POWERPC_OPT_MASKS},
331	 {"rios", PROCESSOR_RIOS1,
332	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
333	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
334	 {"rios1", PROCESSOR_RIOS1,
335	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
336	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
337	 {"rsc", PROCESSOR_PPC601,
338	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
339	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
340	 {"rsc1", PROCESSOR_PPC601,
341	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
342	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
343	 {"rios2", PROCESSOR_RIOS2,
344	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
345	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
346	 {"rs64a", PROCESSOR_RS64A,
347	    MASK_POWERPC | MASK_NEW_MNEMONICS,
348	    POWER_MASKS | POWERPC_OPT_MASKS},
349	 {"401", PROCESSOR_PPC403,
350	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
351	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
352	 {"403", PROCESSOR_PPC403,
353	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
354	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
355	 {"405", PROCESSOR_PPC405,
356	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
357	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
358	 {"505", PROCESSOR_MPCCORE,
359	    MASK_POWERPC | MASK_NEW_MNEMONICS,
360	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
361	 {"601", PROCESSOR_PPC601,
362	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
363	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
364	 {"602", PROCESSOR_PPC603,
365	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
366	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
367	 {"603", PROCESSOR_PPC603,
368	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
369	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
370	 {"603e", PROCESSOR_PPC603,
371	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
372	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
373	 {"ec603e", PROCESSOR_PPC603,
374	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
376	 {"604", PROCESSOR_PPC604,
377	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
378	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
379	 {"604e", PROCESSOR_PPC604e,
380	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
381	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
382	 {"620", PROCESSOR_PPC620,
383	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
384	    POWER_MASKS | MASK_PPC_GPOPT},
385	 {"630", PROCESSOR_PPC630,
386	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
387	    POWER_MASKS | MASK_PPC_GPOPT},
388	 {"740", PROCESSOR_PPC750,
389 	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
390 	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
391	 {"750", PROCESSOR_PPC750,
392 	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
393 	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
394	 {"7400", PROCESSOR_PPC7400,
395            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
396            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
397	 {"7450", PROCESSOR_PPC7450,
398            MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
399            POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
400	 {"801", PROCESSOR_MPCCORE,
401	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
402	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
403	 {"821", PROCESSOR_MPCCORE,
404	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
405	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
406	 {"823", PROCESSOR_MPCCORE,
407	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
408	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
409	 {"860", PROCESSOR_MPCCORE,
410	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
411	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
412
413  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
414
415  /* Save current -mmultiple/-mno-multiple status.  */
416  int multiple = TARGET_MULTIPLE;
417  /* Save current -mstring/-mno-string status.  */
418  int string = TARGET_STRING;
419
420  /* Identify the processor type.  */
421  rs6000_select[0].string = default_cpu;
422  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
423
424  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
425    {
426      ptr = &rs6000_select[i];
427      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
428	{
429	  for (j = 0; j < ptt_size; j++)
430	    if (! strcmp (ptr->string, processor_target_table[j].name))
431	      {
432		if (ptr->set_tune_p)
433		  rs6000_cpu = processor_target_table[j].processor;
434
435		if (ptr->set_arch_p)
436		  {
437		    target_flags |= processor_target_table[j].target_enable;
438		    target_flags &= ~processor_target_table[j].target_disable;
439		  }
440		break;
441	      }
442
443	  if (j == ptt_size)
444	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
445	}
446    }
447
448  /* If we are optimizing big endian systems for space, use the store
449     multiple instructions.  */
450  if (BYTES_BIG_ENDIAN && optimize_size)
451    target_flags |= MASK_MULTIPLE;
452
453  /* If -mmultiple or -mno-multiple was explicitly used, don't
454     override with the processor default */
455  if (TARGET_MULTIPLE_SET)
456    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
457
458  /* If -mstring or -mno-string was explicitly used, don't override
459     with the processor default.  */
460  if (TARGET_STRING_SET)
461    target_flags = (target_flags & ~MASK_STRING) | string;
462
463  /* Don't allow -mmultiple or -mstring on little endian systems
464     unless the cpu is a 750, because the hardware doesn't support the
465     instructions used in little endian mode, and causes an alignment
466     trap.  The 750 does not cause an alignment trap (except when the
467     target is unaligned).  */
468
469  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
470    {
471      if (TARGET_MULTIPLE)
472	{
473	  target_flags &= ~MASK_MULTIPLE;
474	  if (TARGET_MULTIPLE_SET)
475	    warning ("-mmultiple is not supported on little endian systems");
476	}
477
478      if (TARGET_STRING)
479	{
480	  target_flags &= ~MASK_STRING;
481	  if (TARGET_STRING_SET)
482	    warning ("-mstring is not supported on little endian systems");
483	}
484    }
485
486  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
487    {
488      flag_pic = 0;
489
490      if (extra_warnings)
491	warning ("-f%s ignored (all code is position independent)",
492		 (flag_pic > 1) ? "PIC" : "pic");
493    }
494
495#ifdef XCOFF_DEBUGGING_INFO
496  if (flag_function_sections && (write_symbols != NO_DEBUG)
497      && DEFAULT_ABI == ABI_AIX)
498    {
499      warning ("-ffunction-sections disabled on AIX when debugging");
500      flag_function_sections = 0;
501    }
502
503  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
504    {
505      warning ("-fdata-sections not supported on AIX");
506      flag_data_sections = 0;
507    }
508#endif
509
510  /* Set debug flags */
511  if (rs6000_debug_name)
512    {
513      if (! strcmp (rs6000_debug_name, "all"))
514	rs6000_debug_stack = rs6000_debug_arg = 1;
515      else if (! strcmp (rs6000_debug_name, "stack"))
516	rs6000_debug_stack = 1;
517      else if (! strcmp (rs6000_debug_name, "arg"))
518	rs6000_debug_arg = 1;
519      else
520	error ("unknown -mdebug-%s switch", rs6000_debug_name);
521    }
522
523  /* Set size of long double */
524  rs6000_long_double_type_size = 64;
525  if (rs6000_long_double_size_string)
526    {
527      char *tail;
528      int size = strtol (rs6000_long_double_size_string, &tail, 10);
529      if (*tail != '\0' || (size != 64 && size != 128))
530	error ("Unknown switch -mlong-double-%s",
531	       rs6000_long_double_size_string);
532      else
533	rs6000_long_double_type_size = size;
534    }
535
536  /* Handle -mabi= options.  */
537  rs6000_parse_abi_options ();
538
539#ifdef TARGET_REGNAMES
540  /* If the user desires alternate register names, copy in the
541     alternate names now.  */
542  if (TARGET_REGNAMES)
543    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
544#endif
545
546#ifdef SUBTARGET_OVERRIDE_OPTIONS
547  SUBTARGET_OVERRIDE_OPTIONS;
548#endif
549#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
550  SUBSUBTARGET_OVERRIDE_OPTIONS;
551#endif
552
553  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
554     If -maix-struct-return or -msvr4-struct-return was explicitly
555     used, don't override with the ABI default.  */
556  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
557    {
558      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
559	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
560      else
561	target_flags |= MASK_AIX_STRUCT_RET;
562    }
563
564  /* Register global variables with the garbage collector.  */
565  rs6000_add_gc_roots ();
566
567  /* Allocate an alias set for register saves & restores from stack.  */
568  rs6000_sr_alias_set = new_alias_set ();
569
570  if (TARGET_TOC)
571    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
572
573  /* We can only guarantee the availability of DI pseudo-ops when
574     assembling for 64-bit targets.  */
575  if (!TARGET_64BIT)
576    {
577      targetm.asm_out.aligned_op.di = NULL;
578      targetm.asm_out.unaligned_op.di = NULL;
579    }
580
581  /* Arrange to save and restore machine status around nested functions.  */
582  init_machine_status = rs6000_init_machine_status;
583  free_machine_status = rs6000_free_machine_status;
584}
585
586/* Handle -mabi= options.  */
587static void
588rs6000_parse_abi_options ()
589{
590  if (rs6000_abi_string == 0)
591    return;
592  else if (! strcmp (rs6000_abi_string, "altivec"))
593    rs6000_altivec_abi = 1;
594  else if (! strcmp (rs6000_abi_string, "no-altivec"))
595    rs6000_altivec_abi = 0;
596  else
597    error ("unknown ABI specified: '%s'", rs6000_abi_string);
598}
599
/* Per-optimization-level option hook (presumably wired to
   OPTIMIZATION_OPTIONS in the target header — TODO confirm).
   Currently a no-op for this port.  */
600void
601optimization_options (level, size)
602     int level ATTRIBUTE_UNUSED;
603     int size ATTRIBUTE_UNUSED;
604{
  /* Nothing to adjust.  */
605}
606
607/* Do anything needed at the start of the asm file.  Under
   -fverbose-asm this emits one assembler comment line listing the
   rs6000/powerpc options in effect.  */
608
609void
610rs6000_file_start (file, default_cpu)
611     FILE *file;
612     const char *default_cpu;
613{
614  size_t i;
615  char buffer[80];
  /* START points at the comment-header text until the first item has
     been printed; after that it is "" so the header appears once.  */
616  const char *start = buffer;
617  struct rs6000_cpu_select *ptr;
618
619  if (flag_verbose_asm)
620    {
621      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
622      rs6000_select[0].string = default_cpu;
623
624      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
625	{
626	  ptr = &rs6000_select[i];
627	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
628	    {
629	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
630	      start = "";
631	    }
632	}
633
634#ifdef USING_ELFOS_H
635      switch (rs6000_sdata)
636	{
637	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
638	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
639	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
640	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
641	}
642
643      if (rs6000_sdata && g_switch_value)
644	{
645	  fprintf (file, "%s -G %d", start, g_switch_value);
646	  start = "";
647	}
648#endif
649
      /* *START is '\0' only if something was printed above (START was
	 reset to ""); terminate the comment line in that case.  */
650      if (*start == '\0')
651	putc ('\n', file);
652    }
653}
654
655
656/* Create a CONST_DOUBLE from a string.  */
657
658struct rtx_def *
659rs6000_float_const (string, mode)
660     const char *string;
661     enum machine_mode mode;
662{
663  REAL_VALUE_TYPE value;
664  value = REAL_VALUE_ATOF (string, mode);
665  return immed_real_const_1 (value, mode);
666}
667
668/* Return non-zero if this function is known to have a null epilogue.  */
669
670int
671direct_return ()
672{
673  if (reload_completed)
674    {
675      rs6000_stack_t *info = rs6000_stack_info ();
676
677      if (info->first_gp_reg_save == 32
678	  && info->first_fp_reg_save == 64
679	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680	  && ! info->lr_save_p
681	  && ! info->cr_save_p
682	  && info->vrsave_mask == 0
683	  && ! info->push_p)
684	return 1;
685    }
686
687  return 0;
688}
689
690/* Returns 1 always.  */
691
/* Dummy predicate for machine-description patterns that must never
   reject an operand; both arguments are ignored.  */
692int
693any_operand (op, mode)
694     rtx op ATTRIBUTE_UNUSED;
695     enum machine_mode mode ATTRIBUTE_UNUSED;
696{
697  return 1;
698}
699
700/* Returns 1 if op is the count register.  */
701int
702count_register_operand (op, mode)
703     rtx op;
704     enum machine_mode mode ATTRIBUTE_UNUSED;
705{
706  if (GET_CODE (op) != REG)
707    return 0;
708
709  if (REGNO (op) == COUNT_REGISTER_REGNUM)
710    return 1;
711
712  if (REGNO (op) > FIRST_PSEUDO_REGISTER)
713    return 1;
714
715  return 0;
716}
717
718/* Returns 1 if op is an altivec register.  */
719int
720altivec_register_operand (op, mode)
721     rtx op;
722     enum machine_mode mode ATTRIBUTE_UNUSED;
723{
724
725  return (register_operand (op, mode)
726	  && (GET_CODE (op) != REG
727	      || REGNO (op) > FIRST_PSEUDO_REGISTER
728	      || ALTIVEC_REGNO_P (REGNO (op))));
729}
730
731int
732xer_operand (op, mode)
733     rtx op;
734     enum machine_mode mode ATTRIBUTE_UNUSED;
735{
736  if (GET_CODE (op) != REG)
737    return 0;
738
739  if (XER_REGNO_P (REGNO (op)))
740    return 1;
741
742  return 0;
743}
744
745/* Return 1 if OP is a signed 8-bit constant.  Int multiplication
746   by such constants completes more quickly.  */
747
748int
749s8bit_cint_operand (op, mode)
750     rtx op;
751     enum machine_mode mode ATTRIBUTE_UNUSED;
752{
753  return ( GET_CODE (op) == CONST_INT
754	  && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755}
756
757/* Return 1 if OP is a constant that can fit in a D field.  */
758
759int
760short_cint_operand (op, mode)
761     rtx op;
762     enum machine_mode mode ATTRIBUTE_UNUSED;
763{
764  return (GET_CODE (op) == CONST_INT
765	  && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766}
767
768/* Similar for an unsigned D field.  */
769
770int
771u_short_cint_operand (op, mode)
772     rtx op;
773     enum machine_mode mode ATTRIBUTE_UNUSED;
774{
775  return (GET_CODE (op) == CONST_INT
776	  && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777}
778
779/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
780
781int
782non_short_cint_operand (op, mode)
783     rtx op;
784     enum machine_mode mode ATTRIBUTE_UNUSED;
785{
786  return (GET_CODE (op) == CONST_INT
787	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788}
789
790/* Returns 1 if OP is a CONST_INT that is a positive value
791   and an exact power of 2.  */
792
793int
794exact_log2_cint_operand (op, mode)
795     rtx op;
796     enum machine_mode mode ATTRIBUTE_UNUSED;
797{
798  return (GET_CODE (op) == CONST_INT
799	  && INTVAL (op) > 0
800	  && exact_log2 (INTVAL (op)) >= 0);
801}
802
803/* Returns 1 if OP is a register that is not special (i.e., not MQ,
804   ctr, or lr).  */
805
806int
807gpc_reg_operand (op, mode)
808     rtx op;
809     enum machine_mode mode;
810{
  /* Accept hard registers below MQ_REGNO (the GPRs and FPRs, per the
     rs6000_reg_names layout), and anything at or above
     ARG_POINTER_REGNUM except XER — the latter range covers pseudos.
     The registers between MQ_REGNO and ARG_POINTER_REGNUM (MQ, LR,
     CTR) are thereby excluded.  */
811  return (register_operand (op, mode)
812	  && (GET_CODE (op) != REG
813	      || (REGNO (op) >= ARG_POINTER_REGNUM
814		  && !XER_REGNO_P (REGNO (op)))
815	      || REGNO (op) < MQ_REGNO));
816}
817
818/* Returns 1 if OP is either a pseudo-register or a register denoting a
819   CR field.  */
820
821int
822cc_reg_operand (op, mode)
823     rtx op;
824     enum machine_mode mode;
825{
826  return (register_operand (op, mode)
827	  && (GET_CODE (op) != REG
828	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
829	      || CR_REGNO_P (REGNO (op))));
830}
831
832/* Returns 1 if OP is either a pseudo-register or a register denoting a
833   CR field that isn't CR0.  */
834
835int
836cc_reg_not_cr0_operand (op, mode)
837     rtx op;
838     enum machine_mode mode;
839{
840  return (register_operand (op, mode)
841	  && (GET_CODE (op) != REG
842	      || REGNO (op) >= FIRST_PSEUDO_REGISTER
843	      || CR_REGNO_NOT_CR0_P (REGNO (op))));
844}
845
846/* Returns 1 if OP is either a constant integer valid for a D-field or
847   a non-special register.  If a register, it must be in the proper
848   mode unless MODE is VOIDmode.  */
849
850int
851reg_or_short_operand (op, mode)
852      rtx op;
853      enum machine_mode mode;
854{
855  return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856}
857
858/* Similar, except check if the negation of the constant would be
859   valid for a D-field.  */
860
861int
862reg_or_neg_short_operand (op, mode)
863      rtx op;
864      enum machine_mode mode;
865{
866  if (GET_CODE (op) == CONST_INT)
867    return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
868
869  return gpc_reg_operand (op, mode);
870}
871
872/* Returns 1 if OP is either a constant integer valid for a DS-field or
873   a non-special register.  If a register, it must be in the proper
874   mode unless MODE is VOIDmode.  */
875
876int
877reg_or_aligned_short_operand (op, mode)
878      rtx op;
879      enum machine_mode mode;
880{
881  if (gpc_reg_operand (op, mode))
882    return 1;
883  else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
884    return 1;
885
886  return 0;
887}
888
889
890/* Return 1 if the operand is either a register or an integer whose
891   high-order 16 bits are zero.  */
892
893int
894reg_or_u_short_operand (op, mode)
895     rtx op;
896     enum machine_mode mode;
897{
898  return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899}
900
901/* Return 1 is the operand is either a non-special register or ANY
902   constant integer.  */
903
904int
905reg_or_cint_operand (op, mode)
906    rtx op;
907    enum machine_mode mode;
908{
909  return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910}
911
912/* Return 1 if the operand is either a non-special register or ANY
913   32-bit signed constant integer.  */
914
915int
916reg_or_arith_cint_operand (op, mode)
917    rtx op;
918    enum machine_mode mode;
919{
  /* On 32-bit hosts every CONST_INT already fits in 32 bits, so the
     range check is compiled in only when HOST_WIDE_INT is wider.  The
     bias by 0x80000000 maps [-0x80000000,0x7fffffff] onto
     [0,0xffffffff].  */
920  return (gpc_reg_operand (op, mode)
921	  || (GET_CODE (op) == CONST_INT
922#if HOST_BITS_PER_WIDE_INT != 32
923	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
924		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
925#endif
926	      ));
927}
928
929/* Return 1 if the operand is either a non-special register or a 32-bit
930   signed constant integer valid for 64-bit addition.  */
931
932int
933reg_or_add_cint64_operand (op, mode)
934    rtx op;
935    enum machine_mode mode;
936{
  /* The upper bound 0x7fff8000 (rather than 0x7fffffff) leaves room
     for the addis/addi split: the low 16 bits are added separately
     and may carry into the high part.  The range test itself is only
     needed when HOST_WIDE_INT is wider than 32 bits.  */
937  return (gpc_reg_operand (op, mode)
938	  || (GET_CODE (op) == CONST_INT
939	      && INTVAL (op) < 0x7fff8000
940#if HOST_BITS_PER_WIDE_INT != 32
941	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
942		  < 0x100000000ll)
943#endif
944	      ));
945}
946
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* Subtraction is implemented as addition of the negated
		 constant, so the same addis/addi window applies to
		 -INTVAL.  */
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a 64-bit host, also reject a negated value outside
		 the biased 32-bit window.  */
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
964
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
    rtx op;
    enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word only makes sense for modes
	     of more than 32 bits.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT would sign-extend into the bits we
	     cannot see, so it is not a 32-bit unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept the constant when no bits above the low 32 survive
	 masking to the mode.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only appears when the value does not
	 fit in a host wide int, which here requires DImode.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
998
999/* Return 1 if the operand is an operand that can be loaded via the GOT.  */
1000
1001int
1002got_operand (op, mode)
1003     rtx op;
1004     enum machine_mode mode ATTRIBUTE_UNUSED;
1005{
1006  return (GET_CODE (op) == SYMBOL_REF
1007	  || GET_CODE (op) == CONST
1008	  || GET_CODE (op) == LABEL_REF);
1009}
1010
1011/* Return 1 if the operand is a simple references that can be loaded via
1012   the GOT (labels involving addition aren't allowed).  */
1013
1014int
1015got_no_const_operand (op, mode)
1016     rtx op;
1017     enum machine_mode mode ATTRIBUTE_UNUSED;
1018{
1019  return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1020}
1021
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      HOST_WIDE_INT low  = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;  /* sign extend */

      /* Non-negative 32-bit value with a zero high word: two insns
	 (addis + ori or similar pair).  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      /* Negative 32-bit value whose sign extension reproduces the full
	 64-bit constant: also two insns.  */
      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Zero low word: build the high word, then one shift.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      /* General case: build both halves, plus one insn to combine.  */
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* 32-bit constant needing both {cau|addis} and {cal|addi}.  */
  else
    return 2;
}
1063
/* Return the number of instructions needed to load constant OP of mode
   MODE into an integer register.  OP must be a CONST_INT or a
   CONST_DOUBLE; anything else aborts.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is not a sign-extended 32-bit value but is
	 a valid PowerPC64 mask can be formed with li + rldic-style
	 rotate, i.e. two insns.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	    return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Single-precision float: cost of loading its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* DImode (or VOIDmode) CONST_DOUBLEs carry the integer halves
	 directly; otherwise convert the float to its target image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low  = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit GPRs: each half is loaded independently.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit GPR: the high word may be free if it is just
	     the sign extension of the low word.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1138
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if (TARGET_SOFT_FLOAT && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      /* Easy when each 32-bit half of the target image loads in a
	 single instruction.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* Easy when the single 32-bit image loads in one instruction.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* Easy when the low word is zero on PowerPC64, or the whole value
       takes at most two instructions.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1201
/* Return 1 if the operand is a CONST_VECTOR whose every element is
   zero, i.e. a vector constant that can be generated trivially.  */

static int
easy_vector_constant (op)
     rtx op;
{
  rtx elt;
  int units, i;

  if (GET_CODE (op) != CONST_VECTOR)
    return 0;

  units = CONST_VECTOR_NUNITS (op);

  /* We can generate 0 easily.  Look for that.  */
  for (i = 0; i < units; ++i)
    {
      elt = CONST_VECTOR_ELT (op, i);

      /* We could probably simplify this by just checking for equality
	 with CONST0_RTX for the current mode, but let's be safe
	 instead.  */

      switch (GET_CODE (elt))
	{
	case CONST_INT:
	  if (INTVAL (elt) != 0)
	    return 0;
	  break;
	case CONST_DOUBLE:
	  /* Both halves must be zero for a zero element.  */
	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
	    return 0;
	  break;
	default:
	  /* Any other element kind is not a known-zero constant.  */
	  return 0;
	}
    }

  /* We could probably generate a few other constants trivially, but
     gcc doesn't generate them yet.  FIXME later.  */
  return 1;
}
1245
1246/* Return 1 if the operand is the constant 0.  This works for scalars
1247   as well as vectors.  */
1248int
1249zero_constant (op, mode)
1250     rtx op;
1251     enum machine_mode mode;
1252{
1253  return op == CONST0_RTX (mode);
1254}
1255
1256/* Return 1 if the operand is 0.0.  */
1257int
1258zero_fp_constant (op, mode)
1259     rtx op;
1260     enum machine_mode mode;
1261{
1262  return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1263}
1264
1265/* Return 1 if the operand is in volatile memory.  Note that during
1266   the RTL generation phase, memory_operand does not return TRUE for
1267   volatile memory references.  So this function allows us to
1268   recognize volatile references where its safe.  */
1269
1270int
1271volatile_mem_operand (op, mode)
1272     rtx op;
1273     enum machine_mode mode;
1274{
1275  if (GET_CODE (op) != MEM)
1276    return 0;
1277
1278  if (!MEM_VOLATILE_P (op))
1279    return 0;
1280
1281  if (mode != GET_MODE (op))
1282    return 0;
1283
1284  if (reload_completed)
1285    return memory_operand (op, mode);
1286
1287  if (reload_in_progress)
1288    return strict_memory_address_p (mode, XEXP (op, 0));
1289
1290  return memory_address_p (mode, XEXP (op, 0));
1291}
1292
1293/* Return 1 if the operand is an offsettable memory operand.  */
1294
1295int
1296offsettable_mem_operand (op, mode)
1297     rtx op;
1298     enum machine_mode mode;
1299{
1300  return ((GET_CODE (op) == MEM)
1301	  && offsettable_address_p (reload_completed || reload_in_progress,
1302				    mode, XEXP (op, 0)));
1303}
1304
1305/* Return 1 if the operand is either an easy FP constant (see above) or
1306   memory.  */
1307
1308int
1309mem_or_easy_const_operand (op, mode)
1310     rtx op;
1311     enum machine_mode mode;
1312{
1313  return memory_operand (op, mode) || easy_fp_constant (op, mode);
1314}
1315
1316/* Return 1 if the operand is either a non-special register or an item
1317   that can be used as the operand of a `mode' add insn.  */
1318
1319int
1320add_operand (op, mode)
1321    rtx op;
1322    enum machine_mode mode;
1323{
1324  if (GET_CODE (op) == CONST_INT)
1325    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1326	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1327
1328  return gpc_reg_operand (op, mode);
1329}
1330
1331/* Return 1 if OP is a constant but not a valid add_operand.  */
1332
1333int
1334non_add_cint_operand (op, mode)
1335     rtx op;
1336     enum machine_mode mode ATTRIBUTE_UNUSED;
1337{
1338  return (GET_CODE (op) == CONST_INT
1339	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1340	  && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1341}
1342
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host, a negative CONST_INT used in a wider mode
	 would have nonzero bits above the low 32; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only occur for values wider than a
	 host wide int.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* ori/xori handle a low 16-bit immediate, oris/xoris a high one.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1381
1382/* Return 1 if C is a constant that is not a logical operand (as
1383   above), but could be split into one.  */
1384
1385int
1386non_logical_cint_operand (op, mode)
1387     rtx op;
1388     enum machine_mode mode;
1389{
1390  return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1391	  && ! logical_operand (op, mode)
1392	  && reg_or_logical_cint_operand (op, mode));
1393}
1394
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1441
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      /* The value is split across CONST_DOUBLE_LOW/HIGH on narrow
	 hosts; HIGH is only meaningful (and only read) when
	 HOST_BITS_PER_WIDE_INT < 64.  */
      HOST_WIDE_INT low, high, lsb;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  /* Normalize so the LS bit is zero, as above.  */
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* The single transition, if any, lies in the high half.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition in the low half; the high half must be all ones.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1501
1502/* Return 1 if the operand is either a non-special register or a constant
1503   that can be used as the operand of a PowerPC64 logical AND insn.  */
1504
1505int
1506and64_operand (op, mode)
1507    rtx op;
1508    enum machine_mode mode;
1509{
1510  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1511    return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1512
1513  return (logical_operand (op, mode) || mask64_operand (op, mode));
1514}
1515
1516/* Return 1 if the operand is either a non-special register or a
1517   constant that can be used as the operand of an RS/6000 logical AND insn.  */
1518
1519int
1520and_operand (op, mode)
1521    rtx op;
1522    enum machine_mode mode;
1523{
1524  if (fixed_regs[CR0_REGNO])	/* CR0 not available, don't do andi./andis.  */
1525    return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1526
1527  return (logical_operand (op, mode) || mask_operand (op, mode));
1528}
1529
1530/* Return 1 if the operand is a general register or memory operand.  */
1531
1532int
1533reg_or_mem_operand (op, mode)
1534     rtx op;
1535     enum machine_mode mode;
1536{
1537  return (gpc_reg_operand (op, mode)
1538	  || memory_operand (op, mode)
1539	  || volatile_mem_operand (op, mode));
1540}
1541
1542/* Return 1 if the operand is a general register or memory operand without
1543   pre_inc or pre_dec which produces invalid form of PowerPC lwa
1544   instruction.  */
1545
1546int
1547lwa_operand (op, mode)
1548     rtx op;
1549     enum machine_mode mode;
1550{
1551  rtx inner = op;
1552
1553  if (reload_completed && GET_CODE (inner) == SUBREG)
1554    inner = SUBREG_REG (inner);
1555
1556  return gpc_reg_operand (inner, mode)
1557    || (memory_operand (inner, mode)
1558	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
1559	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
1560	&& (GET_CODE (XEXP (inner, 0)) != PLUS
1561	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1562	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1563}
1564
1565/* Return 1 if the operand, used inside a MEM, is a valid first argument
1566   to CALL.  This is a SYMBOL_REF or a pseudo-register, which will be
1567   forced to lr.  */
1568
1569int
1570call_operand (op, mode)
1571     rtx op;
1572     enum machine_mode mode;
1573{
1574  if (mode != VOIDmode && GET_MODE (op) != mode)
1575    return 0;
1576
1577  return (GET_CODE (op) == SYMBOL_REF
1578	  || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1579}
1580
1581/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1582   this file and the function is not weakly defined.  */
1583
1584int
1585current_file_function_operand (op, mode)
1586     rtx op;
1587     enum machine_mode mode ATTRIBUTE_UNUSED;
1588{
1589  return (GET_CODE (op) == SYMBOL_REF
1590	  && (SYMBOL_REF_FLAG (op)
1591	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
1592	          && ! DECL_WEAK (current_function_decl))));
1593}
1594
/* Return 1 if this operand is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
1651
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data only exists when -msdata selected a small-data model.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
       return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols are marked with a leading '@' in their name.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1700
/* Recursive worker for constant_pool_expr_p and toc_relative_expr_p.
   Walk OP and return 1 if it is built only from constant-pool
   SYMBOL_REFs, the TOC label, PLUS/MINUS, CONST and CONST_INTs.
   Set *HAVE_SYM when a pool symbol was seen and *HAVE_TOC when the
   TOC label was seen; the flags are never cleared.  */

static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only pool entries that go in the TOC count.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both arms must themselves be valid.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
1739
1740int
1741constant_pool_expr_p (op)
1742    rtx op;
1743{
1744  int have_sym = 0;
1745  int have_toc = 0;
1746  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1747}
1748
1749int
1750toc_relative_expr_p (op)
1751    rtx op;
1752{
1753    int have_sym = 0;
1754    int have_toc = 0;
1755    return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1756}
1757
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split into a high part added into a
     register and a signed 16-bit low part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If bit 15 of the low part is set, carry into the high part so
	 the low part can be treated as signed.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the other term into a register so the
     sum becomes an indexed address (only for modes that allow indexed
     addressing).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec loads/stores only take reg or reg+reg addresses.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without a TOC: materialize the symbol's high part with
     elf_high and use a LO_SUM address.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Darwin without a TOC: same idea using macho_high.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* A TOC-resident constant pool entry: reference it via the TOC.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
1858
1859/* The convention appears to be to define this wherever it is used.
1860   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1861   is now used here.  */
1862#ifndef REG_MODE_OK_FOR_BASE_P
1863#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1864#endif
1865
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Base register plus a large constant: split into high/low parts so
     the low part fits the 16-bit displacement field.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* A TOC-resident constant pool entry: reference it via the TOC.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
1988
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with an constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, when update forms are enabled.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data reference (V.4/eabi).  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-based constant pool reference.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg), except for multi-register modes, whose
     assembly output adds word offsets to the address.  */
  if (mode != TImode
      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM of a high-part register and a symbol.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2040
2041/* Try to output insns to set TARGET equal to the constant C if it can
2042   be done in less than N insns.  Do all computations in MODE.
2043   Returns the place where the output has been placed if it can be
2044   done and the insns have been emitted.  If it would take more than N
2045   insns, zero is returned and no insns and emitted.  */
2046
2047rtx
2048rs6000_emit_set_const (dest, mode, source, n)
2049     rtx dest, source;
2050     enum machine_mode mode;
2051     int n ATTRIBUTE_UNUSED;
2052{
2053  HOST_WIDE_INT c0, c1;
2054
2055  if (mode == QImode || mode == HImode || mode == SImode)
2056    {
2057      if (dest == NULL)
2058        dest = gen_reg_rtx (mode);
2059      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2060      return dest;
2061    }
2062
2063  if (GET_CODE (source) == CONST_INT)
2064    {
2065      c0 = INTVAL (source);
2066      c1 = -(c0 < 0);
2067    }
2068  else if (GET_CODE (source) == CONST_DOUBLE)
2069    {
2070#if HOST_BITS_PER_WIDE_INT >= 64
2071      c0 = CONST_DOUBLE_LOW (source);
2072      c1 = -(c0 < 0);
2073#else
2074      c0 = CONST_DOUBLE_LOW (source);
2075      c1 = CONST_DOUBLE_HIGH (source);
2076#endif
2077    }
2078  else
2079    abort ();
2080
2081  return rs6000_emit_set_long_const (dest, c0, c1);
2082}
2083
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   DEST receives the 64-bit constant whose low word is C1 and high
   word is C2.  (When HOST_WIDE_INT is at least 64 bits wide, C1 holds
   the entire value and C2 is recomputed from it below.)  Returns
   DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      /* On a 32-bit target the DImode value lives in two GPRs; move
	 each 32-bit half into its subword separately.  */
      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the constant into four 16-bit chunks, ud1 being the
	 least significant.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: the value sign-extends from 16 bits, so a single
	 immediate load suffices.  The XOR/SUB dance produces the
	 sign-extended HOST_WIDE_INT for a chunk with bit 15 set.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1  ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Case 2: the value sign-extends from 32 bits; load the upper
	 chunk shifted, then OR in the low chunk if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: the value sign-extends from 48 bits; build bits
	 47..16 first, shift left 16, then OR in the low chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: build the upper 32 bits, shift left 32,
	     then OR in the lower two chunks as needed.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2172
/* Emit a move from SOURCE to DEST in mode MODE.  This is the movM
   expander: it legitimizes SOURCE as needed (forcing constants to
   memory or the TOC, splitting slow unaligned DImode block moves,
   handling PIC/small-data addresses) before emitting the SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose high half is merely the sign
     extension of its low half should have been a CONST_INT.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If the unaligned DImode access would be slow but two SImode
     accesses would not, split the move into two word moves.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* Storing to memory: make sure the source is in a register.  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  /* POWER (as opposed to PowerPC) stores to SFmode memory.  */
  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  Emit the raw SET and let reload
     sort it out.  */
  if (!VECTOR_MODE_P (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants must come from the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector constants likewise, unless trivially
	 loadable.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* Without a TOC or PIC, build the address with a high/low
	 (lis/la-style) pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* The pooled constant may itself be addressable through the
	     TOC; if so, reference it TOC-relatively.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must be register-indirect; copy other
	 addresses into a register first.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2470
2471/* Initialize a variable CUM of type CUMULATIVE_ARGS
2472   for a call to a function whose data type is FNTYPE.
2473   For a library call, FNTYPE is 0.
2474
2475   For incoming args we set the number of arguments in the prototype large
2476   so we never return a PARALLEL.  */
2477
2478void
2479init_cumulative_args (cum, fntype, libname, incoming)
2480     CUMULATIVE_ARGS *cum;
2481     tree fntype;
2482     rtx libname ATTRIBUTE_UNUSED;
2483     int incoming;
2484{
2485  static CUMULATIVE_ARGS zero_cumulative;
2486
2487  *cum = zero_cumulative;
2488  cum->words = 0;
2489  cum->fregno = FP_ARG_MIN_REG;
2490  cum->vregno = ALTIVEC_ARG_MIN_REG;
2491  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2492  cum->call_cookie = CALL_NORMAL;
2493  cum->sysv_gregno = GP_ARG_MIN_REG;
2494
2495  if (incoming)
2496    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
2497
2498  else if (cum->prototype)
2499    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2500			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2501			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2502
2503  else
2504    cum->nargs_prototype = 0;
2505
2506  cum->orig_nargs = cum->nargs_prototype;
2507
2508  /* Check for longcall's */
2509  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2510    cum->call_cookie = CALL_LONG;
2511
2512  if (TARGET_DEBUG_ARG)
2513    {
2514      fprintf (stderr, "\ninit_cumulative_args:");
2515      if (fntype)
2516	{
2517	  tree ret_type = TREE_TYPE (fntype);
2518	  fprintf (stderr, " ret code = %s,",
2519		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2520	}
2521
2522      if (cum->call_cookie & CALL_LONG)
2523	fprintf (stderr, " longcall,");
2524
2525      fprintf (stderr, " proto = %d, nargs = %d\n",
2526	       cum->prototype, cum->nargs_prototype);
2527    }
2528}
2529
2530/* If defined, a C expression which determines whether, and in which
2531   direction, to pad out an argument with extra space.  The value
2532   should be of type `enum direction': either `upward' to pad above
2533   the argument, `downward' to pad below, or `none' to inhibit
2534   padding.
2535
2536   For the AIX ABI structs are always stored left shifted in their
2537   argument slot.  */
2538
2539enum direction
2540function_arg_padding (mode, type)
2541     enum machine_mode mode;
2542     tree type;
2543{
2544  if (type != 0 && AGGREGATE_TYPE_P (type))
2545    return upward;
2546
2547  /* This is the default definition.  */
2548  return (! BYTES_BIG_ENDIAN
2549          ? upward
2550          : ((mode == BLKmode
2551              ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2552                 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2553              : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2554             ? downward : upward));
2555}
2556
2557/* If defined, a C expression that gives the alignment boundary, in bits,
2558   of an argument with the specified mode and type.  If it is not defined,
2559   PARM_BOUNDARY is used for all arguments.
2560
2561   V.4 wants long longs to be double word aligned.  */
2562
2563int
2564function_arg_boundary (mode, type)
2565     enum machine_mode mode;
2566     tree type ATTRIBUTE_UNUSED;
2567{
2568  if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2569    return 64;
2570  else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2571    return 128;
2572  else
2573    return PARM_BOUNDARY;
2574}
2575
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)
   NAMED is nonzero for a named parameter; here it is used only for
   debug output.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Vector args use a VR while any remain; overflow goes on the
	 stack.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FP arg goes on the stack; DFmode is doubleword
		 aligned there, so round words up to even first.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: gregno keeps advancing even once arguments have
	     started going to the stack; expand_builtin_saveregs uses
	     how far past GP_ARG_MAX_REG it has gone to tell that
	     spilling to the stack has begun.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: arguments occupy consecutive words, with 64-bit
	 args doubleword aligned on 32-bit targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2671
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when TYPE is zero) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named vector args use a VR while any remain; otherwise they
	 go on the stack.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  /* Mirrors the accounting in function_arg_advance.  */
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin conventions.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
	           && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types are always passed on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: return a PARALLEL so the value is
	     passed both in its FP register and in the GP register(s)
	     or stack slot a varargs callee would read it from.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2811
2812/* For an arg passed partly in registers and partly in memory,
2813   this is the number of registers used.
2814   For args passed entirely in registers or entirely in memory, zero.  */
2815
2816int
2817function_arg_partial_nregs (cum, mode, type, named)
2818     CUMULATIVE_ARGS *cum;
2819     enum machine_mode mode;
2820     tree type;
2821     int named ATTRIBUTE_UNUSED;
2822{
2823  if (DEFAULT_ABI == ABI_V4)
2824    return 0;
2825
2826  if (USE_FP_FOR_ARG_P (*cum, mode, type)
2827      || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2828    {
2829      if (cum->nargs_prototype >= 0)
2830	return 0;
2831    }
2832
2833  if (cum->words < GP_ARG_NUM_REG
2834      && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2835    {
2836      int ret = GP_ARG_NUM_REG - cum->words;
2837      if (ret && TARGET_DEBUG_ARG)
2838	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2839
2840      return ret;
2841    }
2842
2843  return 0;
2844}
2845
2846/* A C expression that indicates when an argument must be passed by
2847   reference.  If nonzero for an argument, a copy of that argument is
2848   made in memory and a pointer to the argument is passed instead of
2849   the argument itself.  The pointer is passed in whatever way is
2850   appropriate for passing a pointer to that type.
2851
2852   Under V.4, structures and unions are passed by reference.  */
2853
2854int
2855function_arg_pass_by_reference (cum, mode, type, named)
2856     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2857     enum machine_mode mode ATTRIBUTE_UNUSED;
2858     tree type;
2859     int named ATTRIBUTE_UNUSED;
2860{
2861  if (DEFAULT_ABI == ABI_V4
2862      && ((type && AGGREGATE_TYPE_P (type))
2863	  || mode == TFmode))
2864    {
2865      if (TARGET_DEBUG_ARG)
2866	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2867
2868      return 1;
2869    }
2870
2871  return 0;
2872}
2873
2874/* Perform any needed actions needed for a function that is receiving a
2875   variable number of arguments.
2876
2877   CUM is as above.
2878
2879   MODE and TYPE are the mode and type of the current parameter.
2880
2881   PRETEND_SIZE is a variable that should be set to the amount of stack
2882   that must be pushed by the prolog to pretend that our caller pushed
2883   it.
2884
2885   Normally, this macro will push all remaining incoming registers on the
2886   stack and set PRETEND_SIZE to the length of the registers pushed.  */
2887
2888void
2889setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2890     CUMULATIVE_ARGS *cum;
2891     enum machine_mode mode;
2892     tree type;
2893     int *pretend_size;
2894     int no_rtl;
2895
2896{
2897  CUMULATIVE_ARGS next_cum;
2898  int reg_size = TARGET_32BIT ? 4 : 8;
2899  rtx save_area = NULL_RTX, mem;
2900  int first_reg_offset, set;
2901  tree fntype;
2902  int stdarg_p;
2903
2904  fntype = TREE_TYPE (current_function_decl);
2905  stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2906	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2907		  != void_type_node));
2908
2909  /* For varargs, we do not want to skip the dummy va_dcl argument.
2910     For stdargs, we do want to skip the last named argument.  */
2911  next_cum = *cum;
2912  if (stdarg_p)
2913    function_arg_advance (&next_cum, mode, type, 1);
2914
2915  if (DEFAULT_ABI == ABI_V4)
2916    {
2917      /* Indicate to allocate space on the stack for varargs save area.  */
2918      /* ??? Does this really have to be located at a magic spot on the
2919	 stack, or can we allocate this with assign_stack_local instead.  */
2920      cfun->machine->sysv_varargs_p = 1;
2921      if (! no_rtl)
2922	save_area = plus_constant (virtual_stack_vars_rtx,
2923				   - RS6000_VARARGS_SIZE);
2924
2925      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2926    }
2927  else
2928    {
2929      first_reg_offset = next_cum.words;
2930      save_area = virtual_incoming_args_rtx;
2931      cfun->machine->sysv_varargs_p = 0;
2932
2933      if (MUST_PASS_IN_STACK (mode, type))
2934	first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2935    }
2936
2937  set = get_varargs_alias_set ();
2938  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2939    {
2940      mem = gen_rtx_MEM (BLKmode,
2941		         plus_constant (save_area,
2942					first_reg_offset * reg_size)),
2943      set_mem_alias_set (mem, set);
2944      set_mem_align (mem, BITS_PER_WORD);
2945
2946      move_block_from_reg
2947	(GP_ARG_MIN_REG + first_reg_offset, mem,
2948	 GP_ARG_NUM_REG - first_reg_offset,
2949	 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2950
2951      /* ??? Does ABI_V4 need this at all?  */
2952      *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2953    }
2954
2955  /* Save FP registers if needed.  */
2956  if (DEFAULT_ABI == ABI_V4
2957      && TARGET_HARD_FLOAT && ! no_rtl
2958      && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2959    {
2960      int fregno = next_cum.fregno;
2961      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2962      rtx lab = gen_label_rtx ();
2963      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2964
2965      emit_jump_insn (gen_rtx_SET (VOIDmode,
2966				   pc_rtx,
2967				   gen_rtx_IF_THEN_ELSE (VOIDmode,
2968					    gen_rtx_NE (VOIDmode, cr1,
2969						        const0_rtx),
2970					    gen_rtx_LABEL_REF (VOIDmode, lab),
2971					    pc_rtx)));
2972
2973      while (fregno <= FP_ARG_V4_MAX_REG)
2974	{
2975	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2976          set_mem_alias_set (mem, set);
2977	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2978	  fregno++;
2979	  off += 8;
2980	}
2981
2982      emit_label (lab);
2983    }
2984}
2985
2986/* Create the va_list data type.  */
2987
2988tree
2989rs6000_build_va_list ()
2990{
2991  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2992
2993  /* For AIX, prefer 'char *' because that's what the system
2994     header files like.  */
2995  if (DEFAULT_ABI != ABI_V4)
2996    return build_pointer_type (char_type_node);
2997
2998  record = make_lang_type (RECORD_TYPE);
2999  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3000
3001  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3002		      unsigned_char_type_node);
3003  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3004		      unsigned_char_type_node);
3005  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3006		      ptr_type_node);
3007  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3008		      ptr_type_node);
3009
3010  DECL_FIELD_CONTEXT (f_gpr) = record;
3011  DECL_FIELD_CONTEXT (f_fpr) = record;
3012  DECL_FIELD_CONTEXT (f_ovf) = record;
3013  DECL_FIELD_CONTEXT (f_sav) = record;
3014
3015  TREE_CHAIN (record) = type_decl;
3016  TYPE_NAME (record) = type_decl;
3017  TYPE_FIELDS (record) = f_gpr;
3018  TREE_CHAIN (f_gpr) = f_fpr;
3019  TREE_CHAIN (f_fpr) = f_ovf;
3020  TREE_CHAIN (f_ovf) = f_sav;
3021
3022  layout_type (record);
3023
3024  /* The correct type is an array type of one element.  */
3025  return build_array_type (record, build_index_type (size_zero_node));
3026}
3027
3028/* Implement va_start.  */
3029
void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* The four fields of the __va_list_tag record created by
     rs6000_build_va_list, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Strip one level of indirection from VALIST and build a reference
     to each of its fields.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the gpr field: number of GP registers consumed so far.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Initialize the fpr field: number of FP registers consumed so far.  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  It sits RS6000_VARARGS_SIZE below
     the virtual stack variables, matching setup_incoming_varargs.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3099
3100/* Implement va_arg.  */
3101
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* The four fields of the __va_list_tag record, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build references to each field of VALIST.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Argument size in bytes and in words (rounded up).  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter applies, how many
     registers it consumes, and how the save area is indexed.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      /* Only a pointer is fetched from the register/stack slot.  */
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;	/* FP slots follow the eight 4-byte GP slots.  */
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /*  Vectors never go in registers.  */
  if (TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register counter is already >= 8 - n_reg + 1, the
	 argument does not fit in registers; take the overflow path.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* reg += reg & (n_reg - 1): round the counter up to the
	     next multiple of n_reg.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + reg++ * sav_scale; the counter is
	 post-incremented by the number of registers consumed.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      /* Got the address from the save area; skip the overflow code.  */
      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* Vectors are 16 byte aligned.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* t = (ovf + align) & ~align  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, what we computed is the address of a
     pointer to the value; load through it.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3269
3270/* Builtins.  */
3271
/* Register builtin NAME of type TYPE with builtin code CODE, but only
   when MASK overlaps the current target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
do {									\
  if ((MASK) & target_flags)						\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);	\
} while (0)

/* One entry of a builtin table: the target_flags mask that enables the
   builtin, the insn used to expand it, its user-visible name, and its
   rs6000 builtin code.  */
struct builtin_description
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
3285
3286/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */
3287
/* Each entry: { enabling mask, expander insn, builtin name, builtin code }.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3314
3315/* DST operations: void foo (void *, const int, const char).  */
3316
/* Each entry: { enabling mask, expander insn, builtin name, builtin code }.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3324
3325/* Simple binary operations: VECc = foo (VECa, VECb).  */
3326
3327static const struct builtin_description bdesc_2arg[] =
3328{
3329  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3330  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3331  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3332  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3333  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3334  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3335  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3336  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3337  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3338  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3339  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3340  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3341  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3342  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3343  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3344  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3345  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3346  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3347  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3348  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3349  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3350  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3351  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3352  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3353  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3354  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3355  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3356  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3357  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3358  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3359  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3360  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3361  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3362  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3363  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3364  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3365  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3366  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3367  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3368  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3369  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3370  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3371  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3372  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3373  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3374  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3375  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3376  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3377  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3378  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3379  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3380  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3381  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3382  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3383  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3384  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3385  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3386  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3387  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3388  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3389  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3390  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3391  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3392  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3393  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3394  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3395  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3396  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3397  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3398  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3399  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3400  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3401  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3402  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3403  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3404  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3405  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3406  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3407  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3408  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3409  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3410  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3411  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3412  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3413  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3414  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3415  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3416  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3417  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3418  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3419  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3420  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3421  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3422  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3423  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3424  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3425  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3426  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3427  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3428  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3429  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3430  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3431  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3432  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3433  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3434  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3435  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3436  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3437  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3438  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3439  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3440  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3441  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3442};
3443
3444/* AltiVec predicates.  */
3445
/* Like struct builtin_description, with an extra OPCODE field naming
   the comparison insn handed to the predicate expander pattern.  */
struct builtin_description_predicates
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *opcode;
  const char *const name;
  const enum rs6000_builtins code;
};
3454
/* Each entry: { enabling mask, predicate expander, compare opcode,
   builtin name, builtin code }.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
3471
/* ABS* operations.  */
3473
/* Each entry: { enabling mask, expander insn, builtin name, builtin code }.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
3484
3485/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3486   foo (VECa).  */
3487
/* Each entry: { enabling mask, expander insn, builtin name, builtin code }.  */
static const struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3508
3509static rtx
3510altivec_expand_unop_builtin (icode, arglist, target)
3511     enum insn_code icode;
3512     tree arglist;
3513     rtx target;
3514{
3515  rtx pat;
3516  tree arg0 = TREE_VALUE (arglist);
3517  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3518  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3519  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3520
3521  /* If we got invalid arguments bail out before generating bad rtl.  */
3522  if (arg0 == error_mark_node)
3523    return NULL_RTX;
3524
3525  if (target == 0
3526      || GET_MODE (target) != tmode
3527      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3528    target = gen_reg_rtx (tmode);
3529
3530  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3531    op0 = copy_to_mode_reg (mode0, op0);
3532
3533  pat = GEN_FCN (icode) (target, op0);
3534  if (! pat)
3535    return 0;
3536  emit_insn (pat);
3537
3538  return target;
3539}
3540
3541static rtx
3542altivec_expand_abs_builtin (icode, arglist, target)
3543     enum insn_code icode;
3544     tree arglist;
3545     rtx target;
3546{
3547  rtx pat, scratch1, scratch2;
3548  tree arg0 = TREE_VALUE (arglist);
3549  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3550  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3551  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3552
3553  /* If we have invalid arguments, bail out before generating bad rtl.  */
3554  if (arg0 == error_mark_node)
3555    return NULL_RTX;
3556
3557  if (target == 0
3558      || GET_MODE (target) != tmode
3559      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3560    target = gen_reg_rtx (tmode);
3561
3562  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3563    op0 = copy_to_mode_reg (mode0, op0);
3564
3565  scratch1 = gen_reg_rtx (mode0);
3566  scratch2 = gen_reg_rtx (mode0);
3567
3568  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3569  if (! pat)
3570    return 0;
3571  emit_insn (pat);
3572
3573  return target;
3574}
3575
3576static rtx
3577altivec_expand_binop_builtin (icode, arglist, target)
3578     enum insn_code icode;
3579     tree arglist;
3580     rtx target;
3581{
3582  rtx pat;
3583  tree arg0 = TREE_VALUE (arglist);
3584  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3585  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3586  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3587  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3588  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3589  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3590
3591  /* If we got invalid arguments bail out before generating bad rtl.  */
3592  if (arg0 == error_mark_node || arg1 == error_mark_node)
3593    return NULL_RTX;
3594
3595  if (target == 0
3596      || GET_MODE (target) != tmode
3597      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3598    target = gen_reg_rtx (tmode);
3599
3600  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3601    op0 = copy_to_mode_reg (mode0, op0);
3602  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3603    op1 = copy_to_mode_reg (mode1, op1);
3604
3605  pat = GEN_FCN (icode) (target, op0, op1);
3606  if (! pat)
3607    return 0;
3608  emit_insn (pat);
3609
3610  return target;
3611}
3612
3613static rtx
3614altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3615     enum insn_code icode;
3616     const char *opcode;
3617     tree arglist;
3618     rtx target;
3619{
3620  rtx pat, scratch;
3621  tree cr6_form = TREE_VALUE (arglist);
3622  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3623  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3624  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3625  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3626  enum machine_mode tmode = SImode;
3627  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3628  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3629  int cr6_form_int;
3630
3631  if (TREE_CODE (cr6_form) != INTEGER_CST)
3632    {
3633      error ("argument 1 of __builtin_altivec_predicate must be a constant");
3634      return NULL_RTX;
3635    }
3636  else
3637    cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3638
3639  if (mode0 != mode1)
3640    abort ();
3641
3642  /* If we have invalid arguments, bail out before generating bad rtl.  */
3643  if (arg0 == error_mark_node || arg1 == error_mark_node)
3644    return NULL_RTX;
3645
3646  if (target == 0
3647      || GET_MODE (target) != tmode
3648      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3649    target = gen_reg_rtx (tmode);
3650
3651  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3652    op0 = copy_to_mode_reg (mode0, op0);
3653  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3654    op1 = copy_to_mode_reg (mode1, op1);
3655
3656  scratch = gen_reg_rtx (mode0);
3657
3658  pat = GEN_FCN (icode) (scratch, op0, op1,
3659			 gen_rtx (SYMBOL_REF, Pmode, opcode));
3660  if (! pat)
3661    return 0;
3662  emit_insn (pat);
3663
3664  /* The vec_any* and vec_all* predicates use the same opcodes for two
3665     different operations, but the bits in CR6 will be different
3666     depending on what information we want.  So we have to play tricks
3667     with CR6 to get the right bits out.
3668
3669     If you think this is disgusting, look at the specs for the
3670     AltiVec predicates.  */
3671
3672     switch (cr6_form_int)
3673       {
3674       case 0:
3675	 emit_insn (gen_cr6_test_for_zero (target));
3676	 break;
3677       case 1:
3678	 emit_insn (gen_cr6_test_for_zero_reverse (target));
3679	 break;
3680       case 2:
3681	 emit_insn (gen_cr6_test_for_lt (target));
3682	 break;
3683       case 3:
3684	 emit_insn (gen_cr6_test_for_lt_reverse (target));
3685	 break;
3686       default:
3687	 error ("argument 1 of __builtin_altivec_predicate is out of range");
3688	 break;
3689       }
3690
3691  return target;
3692}
3693
3694static rtx
3695altivec_expand_stv_builtin (icode, arglist)
3696     enum insn_code icode;
3697     tree arglist;
3698{
3699  tree arg0 = TREE_VALUE (arglist);
3700  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3701  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3702  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3703  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3704  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3705  rtx pat;
3706  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3707  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3708  enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3709
3710  /* Invalid arguments.  Bail before doing anything stoopid!  */
3711  if (arg0 == error_mark_node
3712      || arg1 == error_mark_node
3713      || arg2 == error_mark_node)
3714    return NULL_RTX;
3715
3716  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3717    op0 = copy_to_mode_reg (mode2, op0);
3718  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3719    op1 = copy_to_mode_reg (mode0, op1);
3720  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3721    op2 = copy_to_mode_reg (mode1, op2);
3722
3723  pat = GEN_FCN (icode) (op1, op2, op0);
3724  if (pat)
3725    emit_insn (pat);
3726  return NULL_RTX;
3727}
3728
3729static rtx
3730altivec_expand_ternop_builtin (icode, arglist, target)
3731     enum insn_code icode;
3732     tree arglist;
3733     rtx target;
3734{
3735  rtx pat;
3736  tree arg0 = TREE_VALUE (arglist);
3737  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3738  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3739  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3740  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3741  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3742  enum machine_mode tmode = insn_data[icode].operand[0].mode;
3743  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3744  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3745  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3746
3747  /* If we got invalid arguments bail out before generating bad rtl.  */
3748  if (arg0 == error_mark_node
3749      || arg1 == error_mark_node
3750      || arg2 == error_mark_node)
3751    return NULL_RTX;
3752
3753  if (target == 0
3754      || GET_MODE (target) != tmode
3755      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3756    target = gen_reg_rtx (tmode);
3757
3758  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3759    op0 = copy_to_mode_reg (mode0, op0);
3760  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3761    op1 = copy_to_mode_reg (mode1, op1);
3762  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3763    op2 = copy_to_mode_reg (mode2, op2);
3764
3765  pat = GEN_FCN (icode) (target, op0, op1, op2);
3766  if (! pat)
3767    return 0;
3768  emit_insn (pat);
3769
3770  return target;
3771}
3772static rtx
3773altivec_expand_builtin (exp, target)
3774     tree exp;
3775     rtx target;
3776{
3777  struct builtin_description *d;
3778  struct builtin_description_predicates *dp;
3779  size_t i;
3780  enum insn_code icode;
3781  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3782  tree arglist = TREE_OPERAND (exp, 1);
3783  tree arg0, arg1, arg2;
3784  rtx op0, op1, op2, pat;
3785  enum machine_mode tmode, mode0, mode1, mode2;
3786  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3787
3788  switch (fcode)
3789    {
3790    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3791      icode = CODE_FOR_altivec_lvx_16qi;
3792      arg0 = TREE_VALUE (arglist);
3793      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3794      tmode = insn_data[icode].operand[0].mode;
3795      mode0 = insn_data[icode].operand[1].mode;
3796
3797      if (target == 0
3798	  || GET_MODE (target) != tmode
3799	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3800	target = gen_reg_rtx (tmode);
3801
3802      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3803	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3804
3805      pat = GEN_FCN (icode) (target, op0);
3806      if (! pat)
3807	return 0;
3808      emit_insn (pat);
3809      return target;
3810
3811    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3812      icode = CODE_FOR_altivec_lvx_8hi;
3813      arg0 = TREE_VALUE (arglist);
3814      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3815      tmode = insn_data[icode].operand[0].mode;
3816      mode0 = insn_data[icode].operand[1].mode;
3817
3818      if (target == 0
3819	  || GET_MODE (target) != tmode
3820	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3821	target = gen_reg_rtx (tmode);
3822
3823      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3824	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3825
3826      pat = GEN_FCN (icode) (target, op0);
3827      if (! pat)
3828	return 0;
3829      emit_insn (pat);
3830      return target;
3831
3832    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3833      icode = CODE_FOR_altivec_lvx_4si;
3834      arg0 = TREE_VALUE (arglist);
3835      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3836      tmode = insn_data[icode].operand[0].mode;
3837      mode0 = insn_data[icode].operand[1].mode;
3838
3839      if (target == 0
3840	  || GET_MODE (target) != tmode
3841	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3842	target = gen_reg_rtx (tmode);
3843
3844      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3845	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3846
3847      pat = GEN_FCN (icode) (target, op0);
3848      if (! pat)
3849	return 0;
3850      emit_insn (pat);
3851      return target;
3852
3853    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3854      icode = CODE_FOR_altivec_lvx_4sf;
3855      arg0 = TREE_VALUE (arglist);
3856      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3857      tmode = insn_data[icode].operand[0].mode;
3858      mode0 = insn_data[icode].operand[1].mode;
3859
3860      if (target == 0
3861	  || GET_MODE (target) != tmode
3862	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3863	target = gen_reg_rtx (tmode);
3864
3865      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3866	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3867
3868      pat = GEN_FCN (icode) (target, op0);
3869      if (! pat)
3870	return 0;
3871      emit_insn (pat);
3872      return target;
3873
3874    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3875      icode = CODE_FOR_altivec_stvx_16qi;
3876      arg0 = TREE_VALUE (arglist);
3877      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3878      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3879      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3880      mode0 = insn_data[icode].operand[0].mode;
3881      mode1 = insn_data[icode].operand[1].mode;
3882
3883      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3884	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3885      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3886	op1 = copy_to_mode_reg (mode1, op1);
3887
3888      pat = GEN_FCN (icode) (op0, op1);
3889      if (pat)
3890	emit_insn (pat);
3891      return NULL_RTX;
3892
3893    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3894      icode = CODE_FOR_altivec_stvx_8hi;
3895      arg0 = TREE_VALUE (arglist);
3896      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3897      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3898      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3899      mode0 = insn_data[icode].operand[0].mode;
3900      mode1 = insn_data[icode].operand[1].mode;
3901
3902      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3903	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3904      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3905	op1 = copy_to_mode_reg (mode1, op1);
3906
3907      pat = GEN_FCN (icode) (op0, op1);
3908      if (pat)
3909	emit_insn (pat);
3910      return NULL_RTX;
3911
3912    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3913      icode = CODE_FOR_altivec_stvx_4si;
3914      arg0 = TREE_VALUE (arglist);
3915      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3916      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3917      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3918      mode0 = insn_data[icode].operand[0].mode;
3919      mode1 = insn_data[icode].operand[1].mode;
3920
3921      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3922	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3923      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3924	op1 = copy_to_mode_reg (mode1, op1);
3925
3926      pat = GEN_FCN (icode) (op0, op1);
3927      if (pat)
3928	emit_insn (pat);
3929      return NULL_RTX;
3930
3931    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3932      icode = CODE_FOR_altivec_stvx_4sf;
3933      arg0 = TREE_VALUE (arglist);
3934      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3935      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3936      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3937      mode0 = insn_data[icode].operand[0].mode;
3938      mode1 = insn_data[icode].operand[1].mode;
3939
3940      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3941	op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3942      if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3943	op1 = copy_to_mode_reg (mode1, op1);
3944
3945      pat = GEN_FCN (icode) (op0, op1);
3946      if (pat)
3947	emit_insn (pat);
3948      return NULL_RTX;
3949
3950    case ALTIVEC_BUILTIN_STVX:
3951      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3952    case ALTIVEC_BUILTIN_STVEBX:
3953      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3954    case ALTIVEC_BUILTIN_STVEHX:
3955      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3956    case ALTIVEC_BUILTIN_STVEWX:
3957      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3958    case ALTIVEC_BUILTIN_STVXL:
3959      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3960
3961    case ALTIVEC_BUILTIN_MFVSCR:
3962      icode = CODE_FOR_altivec_mfvscr;
3963      tmode = insn_data[icode].operand[0].mode;
3964
3965      if (target == 0
3966	  || GET_MODE (target) != tmode
3967	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3968	target = gen_reg_rtx (tmode);
3969
3970      pat = GEN_FCN (icode) (target);
3971      if (! pat)
3972	return 0;
3973      emit_insn (pat);
3974      return target;
3975
3976    case ALTIVEC_BUILTIN_MTVSCR:
3977      icode = CODE_FOR_altivec_mtvscr;
3978      arg0 = TREE_VALUE (arglist);
3979      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3980      mode0 = insn_data[icode].operand[0].mode;
3981
3982      /* If we got invalid arguments bail out before generating bad rtl.  */
3983      if (arg0 == error_mark_node)
3984	return NULL_RTX;
3985
3986      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3987	op0 = copy_to_mode_reg (mode0, op0);
3988
3989      pat = GEN_FCN (icode) (op0);
3990      if (pat)
3991	emit_insn (pat);
3992      return NULL_RTX;
3993
3994    case ALTIVEC_BUILTIN_DSSALL:
3995      emit_insn (gen_altivec_dssall ());
3996      return NULL_RTX;
3997
3998    case ALTIVEC_BUILTIN_DSS:
3999      icode = CODE_FOR_altivec_dss;
4000      arg0 = TREE_VALUE (arglist);
4001      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4002      mode0 = insn_data[icode].operand[0].mode;
4003
4004      /* If we got invalid arguments bail out before generating bad rtl.  */
4005      if (arg0 == error_mark_node)
4006	return NULL_RTX;
4007
4008      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4009	op0 = copy_to_mode_reg (mode0, op0);
4010
4011      emit_insn (gen_altivec_dss (op0));
4012      return NULL_RTX;
4013    }
4014
4015  /* Handle DST variants.  */
4016  d = (struct builtin_description *) bdesc_dst;
4017  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4018    if (d->code == fcode)
4019      {
4020	arg0 = TREE_VALUE (arglist);
4021	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4022	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4023	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4024	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4025	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4026	mode0 = insn_data[d->icode].operand[0].mode;
4027	mode1 = insn_data[d->icode].operand[1].mode;
4028	mode2 = insn_data[d->icode].operand[2].mode;
4029
4030	/* Invalid arguments, bail out before generating bad rtl.  */
4031	if (arg0 == error_mark_node
4032	    || arg1 == error_mark_node
4033	    || arg2 == error_mark_node)
4034	  return NULL_RTX;
4035
4036	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4037	  op0 = copy_to_mode_reg (mode0, op0);
4038	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4039	  op1 = copy_to_mode_reg (mode1, op1);
4040
4041	if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4042	  {
4043	    error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4044	    return NULL_RTX;
4045	  }
4046
4047	pat = GEN_FCN (d->icode) (op0, op1, op2);
4048	if (pat != 0)
4049	  emit_insn (pat);
4050
4051	return NULL_RTX;
4052      }
4053
4054  /* Expand abs* operations.  */
4055  d = (struct builtin_description *) bdesc_abs;
4056  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4057    if (d->code == fcode)
4058      return altivec_expand_abs_builtin (d->icode, arglist, target);
4059
4060  /* Handle simple unary operations.  */
4061  d = (struct builtin_description *) bdesc_1arg;
4062  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4063    if (d->code == fcode)
4064      return altivec_expand_unop_builtin (d->icode, arglist, target);
4065
4066  /* Handle simple binary operations.  */
4067  d = (struct builtin_description *) bdesc_2arg;
4068  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4069    if (d->code == fcode)
4070      return altivec_expand_binop_builtin (d->icode, arglist, target);
4071
4072  /* Expand the AltiVec predicates.  */
4073  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4074  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4075    if (dp->code == fcode)
4076      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4077
4078  /* LV* are funky.  We initialized them differently.  */
4079  switch (fcode)
4080    {
4081    case ALTIVEC_BUILTIN_LVSL:
4082      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4083					   arglist, target);
4084    case ALTIVEC_BUILTIN_LVSR:
4085      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4086					   arglist, target);
4087    case ALTIVEC_BUILTIN_LVEBX:
4088      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4089					   arglist, target);
4090    case ALTIVEC_BUILTIN_LVEHX:
4091      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4092					   arglist, target);
4093    case ALTIVEC_BUILTIN_LVEWX:
4094      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4095					   arglist, target);
4096    case ALTIVEC_BUILTIN_LVXL:
4097      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4098					   arglist, target);
4099    case ALTIVEC_BUILTIN_LVX:
4100      return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4101					   arglist, target);
4102    default:
4103      break;
4104      /* Fall through.  */
4105    }
4106
4107  /* Handle simple ternary operations.  */
4108  d = (struct builtin_description *) bdesc_3arg;
4109  for (i = 0; i < sizeof  (bdesc_3arg) / sizeof *d; i++, d++)
4110    if (d->code == fcode)
4111      return altivec_expand_ternop_builtin (d->icode, arglist, target);
4112
4113  abort ();
4114  return NULL_RTX;
4115}
4116
4117/* Expand an expression EXP that calls a built-in function,
4118   with result going to TARGET if that's convenient
4119   (and in mode MODE if that's convenient).
4120   SUBTARGET may be used as the target for computing one of EXP's operands.
4121   IGNORE is nonzero if the value is to be ignored.  */
4122
4123static rtx
4124rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4125     tree exp;
4126     rtx target;
4127     rtx subtarget ATTRIBUTE_UNUSED;
4128     enum machine_mode mode ATTRIBUTE_UNUSED;
4129     int ignore ATTRIBUTE_UNUSED;
4130{
4131  if (TARGET_ALTIVEC)
4132    return altivec_expand_builtin (exp, target);
4133
4134  abort ();
4135}
4136
4137static void
4138rs6000_init_builtins ()
4139{
4140  if (TARGET_ALTIVEC)
4141    altivec_init_builtins ();
4142}
4143
4144static void
4145altivec_init_builtins (void)
4146{
4147  struct builtin_description *d;
4148  struct builtin_description_predicates *dp;
4149  size_t i;
4150
4151  tree endlink = void_list_node;
4152
4153  tree pint_type_node = build_pointer_type (integer_type_node);
4154  tree pvoid_type_node = build_pointer_type (void_type_node);
4155  tree pshort_type_node = build_pointer_type (short_integer_type_node);
4156  tree pchar_type_node = build_pointer_type (char_type_node);
4157  tree pfloat_type_node = build_pointer_type (float_type_node);
4158
4159  tree v4sf_ftype_v4sf_v4sf_v16qi
4160    = build_function_type (V4SF_type_node,
4161			   tree_cons (NULL_TREE, V4SF_type_node,
4162				      tree_cons (NULL_TREE, V4SF_type_node,
4163						 tree_cons (NULL_TREE,
4164							    V16QI_type_node,
4165							    endlink))));
4166  tree v4si_ftype_v4si_v4si_v16qi
4167    = build_function_type (V4SI_type_node,
4168			   tree_cons (NULL_TREE, V4SI_type_node,
4169				      tree_cons (NULL_TREE, V4SI_type_node,
4170						 tree_cons (NULL_TREE,
4171							    V16QI_type_node,
4172							    endlink))));
4173  tree v8hi_ftype_v8hi_v8hi_v16qi
4174    = build_function_type (V8HI_type_node,
4175			   tree_cons (NULL_TREE, V8HI_type_node,
4176				      tree_cons (NULL_TREE, V8HI_type_node,
4177						 tree_cons (NULL_TREE,
4178							    V16QI_type_node,
4179							    endlink))));
4180  tree v16qi_ftype_v16qi_v16qi_v16qi
4181    = build_function_type (V16QI_type_node,
4182			   tree_cons (NULL_TREE, V16QI_type_node,
4183				      tree_cons (NULL_TREE, V16QI_type_node,
4184						 tree_cons (NULL_TREE,
4185							    V16QI_type_node,
4186							    endlink))));
4187
4188  /* V4SI foo (char).  */
4189  tree v4si_ftype_char
4190    = build_function_type (V4SI_type_node,
4191		           tree_cons (NULL_TREE, char_type_node, endlink));
4192
4193  /* V8HI foo (char).  */
4194  tree v8hi_ftype_char
4195    = build_function_type (V8HI_type_node,
4196		           tree_cons (NULL_TREE, char_type_node, endlink));
4197
4198  /* V16QI foo (char).  */
4199  tree v16qi_ftype_char
4200    = build_function_type (V16QI_type_node,
4201		           tree_cons (NULL_TREE, char_type_node, endlink));
4202  /* V4SF foo (V4SF).  */
4203  tree v4sf_ftype_v4sf
4204    = build_function_type (V4SF_type_node,
4205			   tree_cons (NULL_TREE, V4SF_type_node, endlink));
4206
4207  /* V4SI foo (int *).  */
4208  tree v4si_ftype_pint
4209    = build_function_type (V4SI_type_node,
4210			   tree_cons (NULL_TREE, pint_type_node, endlink));
4211  /* V8HI foo (short *).  */
4212  tree v8hi_ftype_pshort
4213    = build_function_type (V8HI_type_node,
4214			   tree_cons (NULL_TREE, pshort_type_node, endlink));
4215  /* V16QI foo (char *).  */
4216  tree v16qi_ftype_pchar
4217    = build_function_type (V16QI_type_node,
4218			   tree_cons (NULL_TREE, pchar_type_node, endlink));
4219  /* V4SF foo (float *).  */
4220  tree v4sf_ftype_pfloat
4221    = build_function_type (V4SF_type_node,
4222			   tree_cons (NULL_TREE, pfloat_type_node, endlink));
4223
4224  /* V8HI foo (V16QI).  */
4225  tree v8hi_ftype_v16qi
4226    = build_function_type (V8HI_type_node,
4227			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4228
4229  /* void foo (void *, int, char/literal).  */
4230  tree void_ftype_pvoid_int_char
4231    = build_function_type (void_type_node,
4232			   tree_cons (NULL_TREE, pvoid_type_node,
4233				      tree_cons (NULL_TREE, integer_type_node,
4234						 tree_cons (NULL_TREE,
4235							    char_type_node,
4236							    endlink))));
4237
4238  /* void foo (int *, V4SI).  */
4239  tree void_ftype_pint_v4si
4240    = build_function_type (void_type_node,
4241			   tree_cons (NULL_TREE, pint_type_node,
4242				      tree_cons (NULL_TREE, V4SI_type_node,
4243						 endlink)));
4244  /* void foo (short *, V8HI).  */
4245  tree void_ftype_pshort_v8hi
4246    = build_function_type (void_type_node,
4247			   tree_cons (NULL_TREE, pshort_type_node,
4248				      tree_cons (NULL_TREE, V8HI_type_node,
4249						 endlink)));
4250  /* void foo (char *, V16QI).  */
4251  tree void_ftype_pchar_v16qi
4252    = build_function_type (void_type_node,
4253			   tree_cons (NULL_TREE, pchar_type_node,
4254				      tree_cons (NULL_TREE, V16QI_type_node,
4255						 endlink)));
4256  /* void foo (float *, V4SF).  */
4257  tree void_ftype_pfloat_v4sf
4258    = build_function_type (void_type_node,
4259			   tree_cons (NULL_TREE, pfloat_type_node,
4260				      tree_cons (NULL_TREE, V4SF_type_node,
4261						 endlink)));
4262
4263  /* void foo (V4SI).  */
4264  tree void_ftype_v4si
4265    = build_function_type (void_type_node,
4266			   tree_cons (NULL_TREE, V4SI_type_node,
4267				      endlink));
4268
4269  /* void foo (vint, int, void *).  */
4270  tree void_ftype_v4si_int_pvoid
4271    = build_function_type (void_type_node,
4272			   tree_cons (NULL_TREE, V4SI_type_node,
4273				      tree_cons (NULL_TREE, integer_type_node,
4274						 tree_cons (NULL_TREE,
4275							    pvoid_type_node,
4276							    endlink))));
4277
4278  /* void foo (vchar, int, void *).  */
4279  tree void_ftype_v16qi_int_pvoid
4280    = build_function_type (void_type_node,
4281			   tree_cons (NULL_TREE, V16QI_type_node,
4282				      tree_cons (NULL_TREE, integer_type_node,
4283						 tree_cons (NULL_TREE,
4284							    pvoid_type_node,
4285							    endlink))));
4286
4287  /* void foo (vshort, int, void *).  */
4288  tree void_ftype_v8hi_int_pvoid
4289    = build_function_type (void_type_node,
4290			   tree_cons (NULL_TREE, V8HI_type_node,
4291				      tree_cons (NULL_TREE, integer_type_node,
4292						 tree_cons (NULL_TREE,
4293							    pvoid_type_node,
4294							    endlink))));
4295
4296  /* void foo (char).  */
4297  tree void_ftype_qi
4298    = build_function_type (void_type_node,
4299			   tree_cons (NULL_TREE, char_type_node,
4300				      endlink));
4301
4302  /* void foo (void).  */
4303  tree void_ftype_void
4304    = build_function_type (void_type_node, void_list_node);
4305
4306  /* vshort foo (void).  */
4307  tree v8hi_ftype_void
4308    = build_function_type (V8HI_type_node, void_list_node);
4309
4310  tree v4si_ftype_v4si_v4si
4311    = build_function_type (V4SI_type_node,
4312			   tree_cons (NULL_TREE, V4SI_type_node,
4313				      tree_cons (NULL_TREE, V4SI_type_node,
4314						 endlink)));
4315
4316  /* These are for the unsigned 5 bit literals.  */
4317
4318  tree v4sf_ftype_v4si_char
4319    = build_function_type (V4SF_type_node,
4320			   tree_cons (NULL_TREE, V4SI_type_node,
4321				      tree_cons (NULL_TREE, char_type_node,
4322						 endlink)));
4323  tree v4si_ftype_v4sf_char
4324    = build_function_type (V4SI_type_node,
4325			   tree_cons (NULL_TREE, V4SF_type_node,
4326				      tree_cons (NULL_TREE, char_type_node,
4327						 endlink)));
4328  tree v4si_ftype_v4si_char
4329    = build_function_type (V4SI_type_node,
4330			   tree_cons (NULL_TREE, V4SI_type_node,
4331				      tree_cons (NULL_TREE, char_type_node,
4332						 endlink)));
4333  tree v8hi_ftype_v8hi_char
4334    = build_function_type (V8HI_type_node,
4335			   tree_cons (NULL_TREE, V8HI_type_node,
4336				      tree_cons (NULL_TREE, char_type_node,
4337						 endlink)));
4338  tree v16qi_ftype_v16qi_char
4339    = build_function_type (V16QI_type_node,
4340			   tree_cons (NULL_TREE, V16QI_type_node,
4341				      tree_cons (NULL_TREE, char_type_node,
4342						 endlink)));
4343
4344  /* These are for the unsigned 4 bit literals.  */
4345
4346  tree v16qi_ftype_v16qi_v16qi_char
4347    = build_function_type (V16QI_type_node,
4348			   tree_cons (NULL_TREE, V16QI_type_node,
4349				      tree_cons (NULL_TREE, V16QI_type_node,
4350						 tree_cons (NULL_TREE,
4351							    char_type_node,
4352							    endlink))));
4353
4354  tree v8hi_ftype_v8hi_v8hi_char
4355    = build_function_type (V8HI_type_node,
4356			   tree_cons (NULL_TREE, V8HI_type_node,
4357				      tree_cons (NULL_TREE, V8HI_type_node,
4358						 tree_cons (NULL_TREE,
4359							    char_type_node,
4360							    endlink))));
4361
4362  tree v4si_ftype_v4si_v4si_char
4363    = build_function_type (V4SI_type_node,
4364			   tree_cons (NULL_TREE, V4SI_type_node,
4365				      tree_cons (NULL_TREE, V4SI_type_node,
4366						 tree_cons (NULL_TREE,
4367							    char_type_node,
4368							    endlink))));
4369
4370  tree v4sf_ftype_v4sf_v4sf_char
4371    = build_function_type (V4SF_type_node,
4372			   tree_cons (NULL_TREE, V4SF_type_node,
4373				      tree_cons (NULL_TREE, V4SF_type_node,
4374						 tree_cons (NULL_TREE,
4375							    char_type_node,
4376							    endlink))));
4377
4378  /* End of 4 bit literals.  */
4379
4380  tree v4sf_ftype_v4sf_v4sf
4381    = build_function_type (V4SF_type_node,
4382			   tree_cons (NULL_TREE, V4SF_type_node,
4383				      tree_cons (NULL_TREE, V4SF_type_node,
4384						 endlink)));
4385  tree v4sf_ftype_v4sf_v4sf_v4si
4386    = build_function_type (V4SF_type_node,
4387			   tree_cons (NULL_TREE, V4SF_type_node,
4388				      tree_cons (NULL_TREE, V4SF_type_node,
4389						 tree_cons (NULL_TREE,
4390							    V4SI_type_node,
4391							    endlink))));
4392  tree v4sf_ftype_v4sf_v4sf_v4sf
4393    = build_function_type (V4SF_type_node,
4394			   tree_cons (NULL_TREE, V4SF_type_node,
4395				      tree_cons (NULL_TREE, V4SF_type_node,
4396						 tree_cons (NULL_TREE,
4397							    V4SF_type_node,
4398							    endlink))));
4399  tree v4si_ftype_v4si_v4si_v4si
4400    = build_function_type (V4SI_type_node,
4401			   tree_cons (NULL_TREE, V4SI_type_node,
4402				      tree_cons (NULL_TREE, V4SI_type_node,
4403						 tree_cons (NULL_TREE,
4404							    V4SI_type_node,
4405							    endlink))));
4406
4407  tree v8hi_ftype_v8hi_v8hi
4408    = build_function_type (V8HI_type_node,
4409			   tree_cons (NULL_TREE, V8HI_type_node,
4410				      tree_cons (NULL_TREE, V8HI_type_node,
4411						 endlink)));
4412  tree v8hi_ftype_v8hi_v8hi_v8hi
4413    = build_function_type (V8HI_type_node,
4414			   tree_cons (NULL_TREE, V8HI_type_node,
4415				      tree_cons (NULL_TREE, V8HI_type_node,
4416						 tree_cons (NULL_TREE,
4417							    V8HI_type_node,
4418							    endlink))));
4419 tree v4si_ftype_v8hi_v8hi_v4si
4420    = build_function_type (V4SI_type_node,
4421			   tree_cons (NULL_TREE, V8HI_type_node,
4422				      tree_cons (NULL_TREE, V8HI_type_node,
4423						 tree_cons (NULL_TREE,
4424							    V4SI_type_node,
4425							    endlink))));
4426 tree v4si_ftype_v16qi_v16qi_v4si
4427    = build_function_type (V4SI_type_node,
4428			   tree_cons (NULL_TREE, V16QI_type_node,
4429				      tree_cons (NULL_TREE, V16QI_type_node,
4430						 tree_cons (NULL_TREE,
4431							    V4SI_type_node,
4432							    endlink))));
4433
4434  tree v16qi_ftype_v16qi_v16qi
4435    = build_function_type (V16QI_type_node,
4436			   tree_cons (NULL_TREE, V16QI_type_node,
4437				      tree_cons (NULL_TREE, V16QI_type_node,
4438						 endlink)));
4439
4440  tree v4si_ftype_v4sf_v4sf
4441    = build_function_type (V4SI_type_node,
4442			   tree_cons (NULL_TREE, V4SF_type_node,
4443				      tree_cons (NULL_TREE, V4SF_type_node,
4444						 endlink)));
4445
4446  tree v4si_ftype_v4si
4447    = build_function_type (V4SI_type_node,
4448			   tree_cons (NULL_TREE, V4SI_type_node, endlink));
4449
4450  tree v8hi_ftype_v8hi
4451    = build_function_type (V8HI_type_node,
4452			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4453
4454  tree v16qi_ftype_v16qi
4455    = build_function_type (V16QI_type_node,
4456			   tree_cons (NULL_TREE, V16QI_type_node, endlink));
4457
4458  tree v8hi_ftype_v16qi_v16qi
4459    = build_function_type (V8HI_type_node,
4460			   tree_cons (NULL_TREE, V16QI_type_node,
4461				      tree_cons (NULL_TREE, V16QI_type_node,
4462						 endlink)));
4463
4464  tree v4si_ftype_v8hi_v8hi
4465    = build_function_type (V4SI_type_node,
4466			   tree_cons (NULL_TREE, V8HI_type_node,
4467				      tree_cons (NULL_TREE, V8HI_type_node,
4468						 endlink)));
4469
4470  tree v8hi_ftype_v4si_v4si
4471    = build_function_type (V8HI_type_node,
4472			   tree_cons (NULL_TREE, V4SI_type_node,
4473				      tree_cons (NULL_TREE, V4SI_type_node,
4474						 endlink)));
4475
4476  tree v16qi_ftype_v8hi_v8hi
4477    = build_function_type (V16QI_type_node,
4478			   tree_cons (NULL_TREE, V8HI_type_node,
4479				      tree_cons (NULL_TREE, V8HI_type_node,
4480						 endlink)));
4481
4482  tree v4si_ftype_v16qi_v4si
4483    = build_function_type (V4SI_type_node,
4484			   tree_cons (NULL_TREE, V16QI_type_node,
4485				      tree_cons (NULL_TREE, V4SI_type_node,
4486						 endlink)));
4487
4488  tree v4si_ftype_v16qi_v16qi
4489    = build_function_type (V4SI_type_node,
4490			   tree_cons (NULL_TREE, V16QI_type_node,
4491				      tree_cons (NULL_TREE, V16QI_type_node,
4492						 endlink)));
4493
4494  tree v4si_ftype_v8hi_v4si
4495    = build_function_type (V4SI_type_node,
4496			   tree_cons (NULL_TREE, V8HI_type_node,
4497				      tree_cons (NULL_TREE, V4SI_type_node,
4498						 endlink)));
4499
4500  tree v4si_ftype_v8hi
4501    = build_function_type (V4SI_type_node,
4502			   tree_cons (NULL_TREE, V8HI_type_node, endlink));
4503
4504  tree int_ftype_v4si_v4si
4505    = build_function_type (integer_type_node,
4506			   tree_cons (NULL_TREE, V4SI_type_node,
4507				      tree_cons (NULL_TREE, V4SI_type_node,
4508						 endlink)));
4509
4510  tree int_ftype_v4sf_v4sf
4511    = build_function_type (integer_type_node,
4512			   tree_cons (NULL_TREE, V4SF_type_node,
4513				      tree_cons (NULL_TREE, V4SF_type_node,
4514						 endlink)));
4515
4516  tree int_ftype_v16qi_v16qi
4517    = build_function_type (integer_type_node,
4518			   tree_cons (NULL_TREE, V16QI_type_node,
4519				      tree_cons (NULL_TREE, V16QI_type_node,
4520						 endlink)));
4521
4522  tree int_ftype_int_v4si_v4si
4523    = build_function_type
4524    (integer_type_node,
4525     tree_cons (NULL_TREE, integer_type_node,
4526		tree_cons (NULL_TREE, V4SI_type_node,
4527			   tree_cons (NULL_TREE, V4SI_type_node,
4528				      endlink))));
4529
4530  tree int_ftype_int_v4sf_v4sf
4531    = build_function_type
4532    (integer_type_node,
4533     tree_cons (NULL_TREE, integer_type_node,
4534		tree_cons (NULL_TREE, V4SF_type_node,
4535			   tree_cons (NULL_TREE, V4SF_type_node,
4536				      endlink))));
4537
4538  tree int_ftype_int_v8hi_v8hi
4539    = build_function_type
4540    (integer_type_node,
4541     tree_cons (NULL_TREE, integer_type_node,
4542		 tree_cons (NULL_TREE, V8HI_type_node,
4543			    tree_cons (NULL_TREE, V8HI_type_node,
4544				       endlink))));
4545
4546  tree int_ftype_int_v16qi_v16qi
4547    = build_function_type
4548    (integer_type_node,
4549     tree_cons (NULL_TREE, integer_type_node,
4550		tree_cons (NULL_TREE, V16QI_type_node,
4551			   tree_cons (NULL_TREE, V16QI_type_node,
4552				      endlink))));
4553
4554  tree v16qi_ftype_int_pvoid
4555    = build_function_type (V16QI_type_node,
4556			   tree_cons (NULL_TREE, integer_type_node,
4557				      tree_cons (NULL_TREE, pvoid_type_node,
4558						 endlink)));
4559
4560  tree v4si_ftype_int_pvoid
4561    = build_function_type (V4SI_type_node,
4562			   tree_cons (NULL_TREE, integer_type_node,
4563				      tree_cons (NULL_TREE, pvoid_type_node,
4564						 endlink)));
4565
4566  tree v8hi_ftype_int_pvoid
4567    = build_function_type (V8HI_type_node,
4568			   tree_cons (NULL_TREE, integer_type_node,
4569				      tree_cons (NULL_TREE, pvoid_type_node,
4570						 endlink)));
4571
4572  tree int_ftype_v8hi_v8hi
4573    = build_function_type (integer_type_node,
4574			   tree_cons (NULL_TREE, V8HI_type_node,
4575				      tree_cons (NULL_TREE, V8HI_type_node,
4576						 endlink)));
4577
4578  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4579  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4580  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4581  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4582  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4583  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4584  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4585  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4586  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4587  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4588  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4589  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4590  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4591  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4592  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4593  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4594  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4595  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4596  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4597  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4598  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4599  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4600  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4601  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4602
4603  /* Add the simple ternary operators.  */
4604  d = (struct builtin_description *) bdesc_3arg;
4605  for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4606    {
4607
4608      enum machine_mode mode0, mode1, mode2, mode3;
4609      tree type;
4610
4611      if (d->name == 0)
4612	continue;
4613
4614      mode0 = insn_data[d->icode].operand[0].mode;
4615      mode1 = insn_data[d->icode].operand[1].mode;
4616      mode2 = insn_data[d->icode].operand[2].mode;
4617      mode3 = insn_data[d->icode].operand[3].mode;
4618
4619      /* When all four are of the same mode.  */
4620      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4621	{
4622	  switch (mode0)
4623	    {
4624	    case V4SImode:
4625	      type = v4si_ftype_v4si_v4si_v4si;
4626	      break;
4627	    case V4SFmode:
4628	      type = v4sf_ftype_v4sf_v4sf_v4sf;
4629	      break;
4630	    case V8HImode:
4631	      type = v8hi_ftype_v8hi_v8hi_v8hi;
4632	      break;
4633	    case V16QImode:
4634	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4635	      break;
4636	    default:
4637	      abort();
4638	    }
4639	}
4640      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4641        {
4642	  switch (mode0)
4643	    {
4644	    case V4SImode:
4645	      type = v4si_ftype_v4si_v4si_v16qi;
4646	      break;
4647	    case V4SFmode:
4648	      type = v4sf_ftype_v4sf_v4sf_v16qi;
4649	      break;
4650	    case V8HImode:
4651	      type = v8hi_ftype_v8hi_v8hi_v16qi;
4652	      break;
4653	    case V16QImode:
4654	      type = v16qi_ftype_v16qi_v16qi_v16qi;
4655	      break;
4656	    default:
4657	      abort();
4658	    }
4659	}
4660      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4661	       && mode3 == V4SImode)
4662	type = v4si_ftype_v16qi_v16qi_v4si;
4663      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4664	       && mode3 == V4SImode)
4665	type = v4si_ftype_v8hi_v8hi_v4si;
4666      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4667	       && mode3 == V4SImode)
4668	type = v4sf_ftype_v4sf_v4sf_v4si;
4669
4670      /* vchar, vchar, vchar, 4 bit literal.  */
4671      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4672	       && mode3 == QImode)
4673	type = v16qi_ftype_v16qi_v16qi_char;
4674
4675      /* vshort, vshort, vshort, 4 bit literal.  */
4676      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4677	       && mode3 == QImode)
4678	type = v8hi_ftype_v8hi_v8hi_char;
4679
4680      /* vint, vint, vint, 4 bit literal.  */
4681      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4682	       && mode3 == QImode)
4683	type = v4si_ftype_v4si_v4si_char;
4684
4685      /* vfloat, vfloat, vfloat, 4 bit literal.  */
4686      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4687	       && mode3 == QImode)
4688	type = v4sf_ftype_v4sf_v4sf_char;
4689
4690      else
4691	abort ();
4692
4693      def_builtin (d->mask, d->name, type, d->code);
4694    }
4695
4696  /* Add the DST variants.  */
4697  d = (struct builtin_description *) bdesc_dst;
4698  for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4699    def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4700
4701  /* Initialize the predicates.  */
4702  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4703  for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4704    {
4705      enum machine_mode mode1;
4706      tree type;
4707
4708      mode1 = insn_data[dp->icode].operand[1].mode;
4709
4710      switch (mode1)
4711	{
4712	case V4SImode:
4713	  type = int_ftype_int_v4si_v4si;
4714	  break;
4715	case V8HImode:
4716	  type = int_ftype_int_v8hi_v8hi;
4717	  break;
4718	case V16QImode:
4719	  type = int_ftype_int_v16qi_v16qi;
4720	  break;
4721	case V4SFmode:
4722	  type = int_ftype_int_v4sf_v4sf;
4723	  break;
4724	default:
4725	  abort ();
4726	}
4727
4728      def_builtin (dp->mask, dp->name, type, dp->code);
4729    }
4730
4731  /* Add the simple binary operators.  */
4732  d = (struct builtin_description *) bdesc_2arg;
4733  for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4734    {
4735      enum machine_mode mode0, mode1, mode2;
4736      tree type;
4737
4738      if (d->name == 0)
4739	continue;
4740
4741      mode0 = insn_data[d->icode].operand[0].mode;
4742      mode1 = insn_data[d->icode].operand[1].mode;
4743      mode2 = insn_data[d->icode].operand[2].mode;
4744
4745      /* When all three operands are of the same mode.  */
4746      if (mode0 == mode1 && mode1 == mode2)
4747	{
4748	  switch (mode0)
4749	    {
4750	    case V4SFmode:
4751	      type = v4sf_ftype_v4sf_v4sf;
4752	      break;
4753	    case V4SImode:
4754	      type = v4si_ftype_v4si_v4si;
4755	      break;
4756	    case V16QImode:
4757	      type = v16qi_ftype_v16qi_v16qi;
4758	      break;
4759	    case V8HImode:
4760	      type = v8hi_ftype_v8hi_v8hi;
4761	      break;
4762	    default:
4763	      abort ();
4764	    }
4765	}
4766
4767      /* A few other combos we really don't want to do manually.  */
4768
4769      /* vint, vfloat, vfloat.  */
4770      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4771	type = v4si_ftype_v4sf_v4sf;
4772
4773      /* vshort, vchar, vchar.  */
4774      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4775	type = v8hi_ftype_v16qi_v16qi;
4776
4777      /* vint, vshort, vshort.  */
4778      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4779	type = v4si_ftype_v8hi_v8hi;
4780
4781      /* vshort, vint, vint.  */
4782      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4783	type = v8hi_ftype_v4si_v4si;
4784
4785      /* vchar, vshort, vshort.  */
4786      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4787	type = v16qi_ftype_v8hi_v8hi;
4788
4789      /* vint, vchar, vint.  */
4790      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4791	type = v4si_ftype_v16qi_v4si;
4792
4793      /* vint, vchar, vchar.  */
4794      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4795	type = v4si_ftype_v16qi_v16qi;
4796
4797      /* vint, vshort, vint.  */
4798      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4799	type = v4si_ftype_v8hi_v4si;
4800
4801      /* vint, vint, 5 bit literal.  */
4802      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4803	type = v4si_ftype_v4si_char;
4804
4805      /* vshort, vshort, 5 bit literal.  */
4806      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4807	type = v8hi_ftype_v8hi_char;
4808
4809      /* vchar, vchar, 5 bit literal.  */
4810      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4811	type = v16qi_ftype_v16qi_char;
4812
4813      /* vfloat, vint, 5 bit literal.  */
4814      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4815	type = v4sf_ftype_v4si_char;
4816
4817      /* vint, vfloat, 5 bit literal.  */
4818      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4819	type = v4si_ftype_v4sf_char;
4820
4821      /* int, x, x.  */
4822      else if (mode0 == SImode)
4823	{
4824	  switch (mode1)
4825	    {
4826	    case V4SImode:
4827	      type = int_ftype_v4si_v4si;
4828	      break;
4829	    case V4SFmode:
4830	      type = int_ftype_v4sf_v4sf;
4831	      break;
4832	    case V16QImode:
4833	      type = int_ftype_v16qi_v16qi;
4834	      break;
4835	    case V8HImode:
4836	      type = int_ftype_v8hi_v8hi;
4837	      break;
4838	    default:
4839	      abort ();
4840	    }
4841	}
4842
4843      else
4844	abort ();
4845
4846      def_builtin (d->mask, d->name, type, d->code);
4847    }
4848
4849  /* Initialize the abs* operators.  */
4850  d = (struct builtin_description *) bdesc_abs;
4851  for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4852    {
4853      enum machine_mode mode0;
4854      tree type;
4855
4856      mode0 = insn_data[d->icode].operand[0].mode;
4857
4858      switch (mode0)
4859	{
4860	case V4SImode:
4861	  type = v4si_ftype_v4si;
4862	  break;
4863	case V8HImode:
4864	  type = v8hi_ftype_v8hi;
4865	  break;
4866	case V16QImode:
4867	  type = v16qi_ftype_v16qi;
4868	  break;
4869	case V4SFmode:
4870	  type = v4sf_ftype_v4sf;
4871	  break;
4872	default:
4873	  abort ();
4874	}
4875
4876      def_builtin (d->mask, d->name, type, d->code);
4877    }
4878
4879  /* Add the simple unary operators.  */
4880  d = (struct builtin_description *) bdesc_1arg;
4881  for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4882    {
4883      enum machine_mode mode0, mode1;
4884      tree type;
4885
4886      if (d->name == 0)
4887	continue;
4888
4889      mode0 = insn_data[d->icode].operand[0].mode;
4890      mode1 = insn_data[d->icode].operand[1].mode;
4891
4892      if (mode0 == V4SImode && mode1 == QImode)
4893        type = v4si_ftype_char;
4894      else if (mode0 == V8HImode && mode1 == QImode)
4895        type = v8hi_ftype_char;
4896      else if (mode0 == V16QImode && mode1 == QImode)
4897        type = v16qi_ftype_char;
4898      else if (mode0 == V4SFmode && mode1 == V4SFmode)
4899	type = v4sf_ftype_v4sf;
4900      else if (mode0 == V8HImode && mode1 == V16QImode)
4901	type = v8hi_ftype_v16qi;
4902      else if (mode0 == V4SImode && mode1 == V8HImode)
4903	type = v4si_ftype_v8hi;
4904      else
4905	abort ();
4906
4907      def_builtin (d->mask, d->name, type, d->code);
4908    }
4909}
4910
4911
4912/* Generate a memory reference for expand_block_move, copying volatile,
4913   and other bits from an original memory reference.  */
4914
4915static rtx
4916expand_block_move_mem (mode, addr, orig_mem)
4917     enum machine_mode mode;
4918     rtx addr;
4919     rtx orig_mem;
4920{
4921  rtx mem = gen_rtx_MEM (mode, addr);
4922
4923  MEM_COPY_ATTRIBUTES (mem, orig_mem);
4924  return mem;
4925}
4926
4927/* Expand a block move operation, and return 1 if successful.  Return 0
4928   if we should let the compiler generate normal code.
4929
4930   operands[0] is the destination
4931   operands[1] is the source
4932   operands[2] is the length
4933   operands[3] is the alignment */
4934
4935#define MAX_MOVE_REG 4
4936
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int num_reg;
  int i;
  rtx src_reg;
  rtx dest_reg;
  rtx src_addr;
  rtx dest_addr;
  rtx tmp_reg;
  rtx stores[MAX_MOVE_REG];
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Move the address into scratch registers.  */
  dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
  src_reg  = copy_addr_to_reg (XEXP (orig_src,  0));

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Each iteration copies MOVE_BYTES bytes, picking the widest
	 string/scalar form allowed by the remaining count, the fixed
	 register availability, and the alignment.  */
      for ( ; bytes > 0; bytes -= move_bytes)
	{
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      /* NOTE(review): a full 32-byte count is passed as 0;
		 presumably the movstrsi_8reg pattern encodes 32 that
		 way -- confirm against rs6000.md.  */
	      emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT ((move_bytes == 32)
						     ? 0 : move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
                      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      /* Single 8-byte scalar copy through a DImode temporary.  */
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (DImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (DImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (SImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (SImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (HImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (HImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (QImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (QImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }

	  /* If more bytes remain, bump both address registers past the
	     chunk just copied; the pointer width picks addsi3/adddi3.  */
	  if (bytes > move_bytes)
	    {
	      if (! TARGET_POWERPC64)
		{
		  emit_insn (gen_addsi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_addsi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	      else
		{
		  emit_insn (gen_adddi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_adddi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Scalar fallback: loads are emitted immediately, while the
	 matching stores are buffered in STORES and flushed in groups
	 of up to MAX_MOVE_REG -- presumably so several loads can issue
	 before their stores.  */
      num_reg = offset = 0;
      for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
	{
	  /* Calculate the correct offset for src/dest */
	  if (offset == 0)
	    {
	      src_addr  = src_reg;
	      dest_addr = dest_reg;
	    }
	  else
	    {
	      src_addr = plus_constant (src_reg, offset);
	      dest_addr = plus_constant (dest_reg, offset);
	    }

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
                 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_insn (gen_movdi (tmp_reg,
				    expand_block_move_mem (DImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_insn (gen_movsi (tmp_reg,
				    expand_block_move_mem (SImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_insn (gen_movhi (tmp_reg,
				    expand_block_move_mem (HImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_insn (gen_movqi (tmp_reg,
				    expand_block_move_mem (QImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }

	  /* Flush the buffered stores once the batch is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Emit any stores left over from a partial final batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
5222
5223
5224/* Return 1 if OP is a load multiple operation.  It is known to be a
5225   PARALLEL and the first section will be tested.  */
5226
5227int
5228load_multiple_operation (op, mode)
5229     rtx op;
5230     enum machine_mode mode ATTRIBUTE_UNUSED;
5231{
5232  int count = XVECLEN (op, 0);
5233  unsigned int dest_regno;
5234  rtx src_addr;
5235  int i;
5236
5237  /* Perform a quick check so we don't blow up below.  */
5238  if (count <= 1
5239      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5240      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5241      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5242    return 0;
5243
5244  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5245  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5246
5247  for (i = 1; i < count; i++)
5248    {
5249      rtx elt = XVECEXP (op, 0, i);
5250
5251      if (GET_CODE (elt) != SET
5252	  || GET_CODE (SET_DEST (elt)) != REG
5253	  || GET_MODE (SET_DEST (elt)) != SImode
5254	  || REGNO (SET_DEST (elt)) != dest_regno + i
5255	  || GET_CODE (SET_SRC (elt)) != MEM
5256	  || GET_MODE (SET_SRC (elt)) != SImode
5257	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5258	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5259	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5260	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5261	return 0;
5262    }
5263
5264  return 1;
5265}
5266
5267/* Similar, but tests for store multiple.  Here, the second vector element
5268   is a CLOBBER.  It will be tested later.  */
5269
5270int
5271store_multiple_operation (op, mode)
5272     rtx op;
5273     enum machine_mode mode ATTRIBUTE_UNUSED;
5274{
5275  int count = XVECLEN (op, 0) - 1;
5276  unsigned int src_regno;
5277  rtx dest_addr;
5278  int i;
5279
5280  /* Perform a quick check so we don't blow up below.  */
5281  if (count <= 1
5282      || GET_CODE (XVECEXP (op, 0, 0)) != SET
5283      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5284      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5285    return 0;
5286
5287  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5288  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5289
5290  for (i = 1; i < count; i++)
5291    {
5292      rtx elt = XVECEXP (op, 0, i + 1);
5293
5294      if (GET_CODE (elt) != SET
5295	  || GET_CODE (SET_SRC (elt)) != REG
5296	  || GET_MODE (SET_SRC (elt)) != SImode
5297	  || REGNO (SET_SRC (elt)) != src_regno + i
5298	  || GET_CODE (SET_DEST (elt)) != MEM
5299	  || GET_MODE (SET_DEST (elt)) != SImode
5300	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5301	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5302	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5303	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5304	return 0;
5305    }
5306
5307  return 1;
5308}
5309
5310/* Return 1 for a parallel vrsave operation.  */
5311
int
vrsave_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno, src_regno;
  int i;

  /* The first element must be a SET whose destination is a REG and
     whose source is an UNSPEC_VOLATILE.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  /* NOTE(review): SET_SRC was just verified to be an UNSPEC_VOLATILE,
     not a REG, so REGNO here reads a raw field of the unspec rather
     than a register number -- confirm this matches how the vrsave
     patterns in rs6000.md are built.  */
  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));

  /* VRSAVE must be on at least one side of the first SET.  */
  if (dest_regno != VRSAVE_REGNO
      && src_regno != VRSAVE_REGNO)
    return 0;

  /* Every remaining element need only be a SET or a CLOBBER; their
     operands are not inspected further here.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != CLOBBER
	  && GET_CODE (elt) != SET)
	return 0;
    }

  return 1;
}
5345
/* Return 1 for a PARALLEL suitable for mtcrf.  Element 0 must be a SET
   of a CR field from a two-operand UNSPEC whose first operand is an
   SImode integer register; every element (including element 0, which
   the loop below re-checks) must move that same source register into a
   CR field, carrying the mask constant that matches the field.  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The value being distributed must live in an SImode GPR.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element sets exactly one condition-register field.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* FXM mask bit for this CR field: higher-numbered fields get
	 lower mask bits.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* 20 is presumably the UNSPEC number the machine description
	 uses for mtcrf — verify against rs6000.md.  All elements must
	 share SRC_REG and carry the mask matching their field.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
5394
/* Return 1 for a PARALLEL suitable for lmw: element 0 loads some GPR
   DEST_REGNO from memory, the vector has exactly 32 - DEST_REGNO
   elements, and element I loads register DEST_REGNO + I from the
   SImode word at BASE + OFFSET + 4 * I.  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through register 31, so the element count is
     fixed by the first destination register.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the base address: a bare register (register 0 is not a
     valid base) or register plus constant offset.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Each element must be an SImode load of the next register.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      /* Same base register, and words laid out consecutively.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5472
/* Return 1 for a PARALLEL suitable for stmw: element 0 stores some GPR
   SRC_REGNO to memory, the vector has exactly 32 - SRC_REGNO elements,
   and element I stores register SRC_REGNO + I to the SImode word at
   BASE + OFFSET + 4 * I.  Mirror image of lmw_operation above.  */

int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores through register 31, so the element count is
     fixed by the first source register.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the base address: a bare register (register 0 is not a
     valid base) or register plus constant offset.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Each element must be an SImode store of the next register.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      /* Same base register, and words laid out consecutively.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
5550
5551/* A validation routine: say whether CODE, a condition code, and MODE
5552   match.  The other alternatives either don't make sense or should
5553   never be generated.  */
5554
5555static void
5556validate_condition_mode (code, mode)
5557     enum rtx_code code;
5558     enum machine_mode mode;
5559{
5560  if (GET_RTX_CLASS (code) != '<'
5561      || GET_MODE_CLASS (mode) != MODE_CC)
5562    abort ();
5563
5564  /* These don't make sense.  */
5565  if ((code == GT || code == LT || code == GE || code == LE)
5566      && mode == CCUNSmode)
5567    abort ();
5568
5569  if ((code == GTU || code == LTU || code == GEU || code == LEU)
5570      && mode != CCUNSmode)
5571    abort ();
5572
5573  if (mode != CCFPmode
5574      && (code == ORDERED || code == UNORDERED
5575	  || code == UNEQ || code == LTGT
5576	  || code == UNGT || code == UNLT
5577	  || code == UNGE || code == UNLE))
5578    abort ();
5579
5580  /* These should never be generated except for
5581     flag_unsafe_math_optimizations.  */
5582  if (mode == CCFPmode
5583      && ! flag_unsafe_math_optimizations
5584      && (code == LE || code == GE
5585	  || code == UNEQ || code == LTGT
5586	  || code == UNGT || code == UNLT))
5587    abort ();
5588
5589  /* These are invalid; the information is not there.  */
5590  if (mode == CCEQmode
5591      && code != EQ && code != NE)
5592    abort ();
5593}
5594
5595/* Return 1 if OP is a comparison operation that is valid for a branch insn.
5596   We only check the opcode against the mode of the CC value here.  */
5597
5598int
5599branch_comparison_operator (op, mode)
5600     rtx op;
5601     enum machine_mode mode ATTRIBUTE_UNUSED;
5602{
5603  enum rtx_code code = GET_CODE (op);
5604  enum machine_mode cc_mode;
5605
5606  if (GET_RTX_CLASS (code) != '<')
5607    return 0;
5608
5609  cc_mode = GET_MODE (XEXP (op, 0));
5610  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5611    return 0;
5612
5613  validate_condition_mode (code, cc_mode);
5614
5615  return 1;
5616}
5617
5618/* Return 1 if OP is a comparison operation that is valid for a branch
5619   insn and which is true if the corresponding bit in the CC register
5620   is set.  */
5621
5622int
5623branch_positive_comparison_operator (op, mode)
5624     rtx op;
5625     enum machine_mode mode;
5626{
5627  enum rtx_code code;
5628
5629  if (! branch_comparison_operator (op, mode))
5630    return 0;
5631
5632  code = GET_CODE (op);
5633  return (code == EQ || code == LT || code == GT
5634	  || code == LTU || code == GTU
5635	  || code == UNORDERED);
5636}
5637
5638/* Return 1 if OP is a comparison operation that is valid for an scc insn.
5639   We check the opcode against the mode of the CC value and disallow EQ or
5640   NE comparisons for integers.  */
5641
5642int
5643scc_comparison_operator (op, mode)
5644     rtx op;
5645     enum machine_mode mode;
5646{
5647  enum rtx_code code = GET_CODE (op);
5648  enum machine_mode cc_mode;
5649
5650  if (GET_MODE (op) != mode && mode != VOIDmode)
5651    return 0;
5652
5653  if (GET_RTX_CLASS (code) != '<')
5654    return 0;
5655
5656  cc_mode = GET_MODE (XEXP (op, 0));
5657  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5658    return 0;
5659
5660  validate_condition_mode (code, cc_mode);
5661
5662  if (code == NE && cc_mode != CCFPmode)
5663    return 0;
5664
5665  return 1;
5666}
5667
5668int
5669trap_comparison_operator (op, mode)
5670    rtx op;
5671    enum machine_mode mode;
5672{
5673  if (mode != VOIDmode && mode != GET_MODE (op))
5674    return 0;
5675  return GET_RTX_CLASS (GET_CODE (op)) == '<';
5676}
5677
5678int
5679boolean_operator (op, mode)
5680    rtx op;
5681    enum machine_mode mode ATTRIBUTE_UNUSED;
5682{
5683  enum rtx_code code = GET_CODE (op);
5684  return (code == AND || code == IOR || code == XOR);
5685}
5686
5687int
5688boolean_or_operator (op, mode)
5689    rtx op;
5690    enum machine_mode mode ATTRIBUTE_UNUSED;
5691{
5692  enum rtx_code code = GET_CODE (op);
5693  return (code == IOR || code == XOR);
5694}
5695
5696int
5697min_max_operator (op, mode)
5698    rtx op;
5699    enum machine_mode mode ATTRIBUTE_UNUSED;
5700{
5701  enum rtx_code code = GET_CODE (op);
5702  return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5703}
5704
5705/* Return 1 if ANDOP is a mask that has no bits on that are not in the
5706   mask required to convert the result of a rotate insn into a shift
5707   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */
5708
5709int
5710includes_lshift_p (shiftop, andop)
5711     rtx shiftop;
5712     rtx andop;
5713{
5714  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5715
5716  shift_mask <<= INTVAL (shiftop);
5717
5718  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5719}
5720
5721/* Similar, but for right shift.  */
5722
5723int
5724includes_rshift_p (shiftop, andop)
5725     rtx shiftop;
5726     rtx andop;
5727{
5728  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5729
5730  shift_mask >>= INTVAL (shiftop);
5731
5732  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5733}
5734
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks can never have the required
	 0...01...10...0 shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The same transition tests, applied to a 64-bit constant held
	 as two host words on hosts narrower than 64 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      /* HIGH is only set -- and, by the guards below, only read --
	 when the host word is narrower than 64 bits.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one 64-bit values.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high word, so the shift must
	     reach at least bit 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      /* If the inverted low word is exhausted, any remaining 1's must
	 form a single run in the high word.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5829
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The same test, applied to a 64-bit constant held as two host
	 words on hosts narrower than 64 bits.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask lies wholly in the high word; shift counts above
		 32 move the permissible LSB within that word.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* A nonzero low word requires an all-ones high word.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5901
5902/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5903   for lfq and stfq insns.
5904
5905   Note reg1 and reg2 *must* be hard registers.  To be sure we will
5906   abort if we are passed pseudo registers.  */
5907
5908int
5909registers_ok_for_quad_peep (reg1, reg2)
5910     rtx reg1, reg2;
5911{
5912  /* We might have been passed a SUBREG.  */
5913  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5914    return 0;
5915
5916  return (REGNO (reg1) == REGNO (reg2) - 1);
5917}
5918
5919/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5920   addr1 and addr2 must be in consecutive memory locations
5921   (addr2 == addr1 + 8).  */
5922
5923int
5924addrs_ok_for_quad_peep (addr1, addr2)
5925     rtx addr1;
5926     rtx addr2;
5927{
5928  unsigned int reg1;
5929  int offset1;
5930
5931  /* Extract an offset (if used) from the first addr.  */
5932  if (GET_CODE (addr1) == PLUS)
5933    {
5934      /* If not a REG, return zero.  */
5935      if (GET_CODE (XEXP (addr1, 0)) != REG)
5936	return 0;
5937      else
5938	{
5939          reg1 = REGNO (XEXP (addr1, 0));
5940	  /* The offset must be constant!  */
5941	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5942            return 0;
5943          offset1 = INTVAL (XEXP (addr1, 1));
5944	}
5945    }
5946  else if (GET_CODE (addr1) != REG)
5947    return 0;
5948  else
5949    {
5950      reg1 = REGNO (addr1);
5951      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
5952      offset1 = 0;
5953    }
5954
5955  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
5956  if (GET_CODE (addr2) != PLUS)
5957    return 0;
5958
5959  if (GET_CODE (XEXP (addr2, 0)) != REG
5960      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5961    return 0;
5962
5963  if (reg1 != REGNO (XEXP (addr2, 0)))
5964    return 0;
5965
5966  /* The offset for the second addr must be 8 more than the first addr.  */
5967  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5968    return 0;
5969
5970  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
5971     instructions.  */
5972  return 1;
5973}
5974
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 if no hard register
     can be determined (constants, memory, unallocated pseudos).  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
6047
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* Only comparison codes map to a CCR bit.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison operand must be a condition register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four bits; per the cases below these are
     LT (+0), GT (+1), EQ (+2), and the unordered/summary bit (+3).  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* For scc, a crnor will have moved the (inverted) EQ bit into
	 the +3 position; otherwise test EQ itself.  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
6108
6109/* Return the GOT register.  */
6110
6111struct rtx_def *
6112rs6000_got_register (value)
6113     rtx value ATTRIBUTE_UNUSED;
6114{
6115  /* The second flow pass currently (June 1999) can't update
6116     regs_ever_live without disturbing other parts of the compiler, so
6117     update it here to make the prolog/epilogue code happy.  */
6118  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6119    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6120
6121  current_function_uses_pic_offset_table = 1;
6122
6123  return pic_offset_table_rtx;
6124}
6125
6126/* Functions to init, mark and free struct machine_function.
6127   These will be called, via pointer variables,
6128   from push_function_context and pop_function_context.  */
6129
6130static void
6131rs6000_init_machine_status (p)
6132     struct function *p;
6133{
6134  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6135}
6136
6137static void
6138rs6000_free_machine_status (p)
6139     struct function *p;
6140{
6141  if (p->machine == NULL)
6142    return;
6143
6144  free (p->machine);
6145  p->machine = NULL;
6146}
6147
6148
6149/* Print an operand.  Recognize special options, documented below.  */
6150
6151#if TARGET_ELF
6152#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6153#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6154#else
6155#define SMALL_DATA_RELOC "sda21"
6156#define SMALL_DATA_REG 0
6157#endif
6158
6159void
6160print_operand (file, x, code)
6161    FILE *file;
6162    rtx x;
6163    int code;
6164{
6165  int i;
6166  HOST_WIDE_INT val;
6167
6168  /* These macros test for integers and extract the low-order bits.  */
6169#define INT_P(X)  \
6170((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
6171 && GET_MODE (X) == VOIDmode)
6172
6173#define INT_LOWPART(X) \
6174  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6175
6176  switch (code)
6177    {
6178    case '.':
6179      /* Write out an instruction after the call which may be replaced
6180	 with glue code by the loader.  This depends on the AIX version.  */
6181      asm_fprintf (file, RS6000_CALL_GLUE);
6182      return;
6183
6184      /* %a is output_address.  */
6185
6186    case 'A':
6187      /* If X is a constant integer whose low-order 5 bits are zero,
6188	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
6189	 in the AIX assembler where "sri" with a zero shift count
6190	 writes a trash instruction.  */
6191      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6192	putc ('l', file);
6193      else
6194	putc ('r', file);
6195      return;
6196
6197    case 'b':
6198      /* If constant, low-order 16 bits of constant, unsigned.
6199	 Otherwise, write normally.  */
6200      if (INT_P (x))
6201	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6202      else
6203	print_operand (file, x, 0);
6204      return;
6205
6206    case 'B':
6207      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6208	 for 64-bit mask direction.  */
6209      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6210      return;
6211
6212      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6213	 output_operand.  */
6214
6215    case 'D':
6216      /* There used to be a comment for 'C' reading "This is an
6217	   optional cror needed for certain floating-point
6218	   comparisons.  Otherwise write nothing."  */
6219
6220      /* Similar, except that this is for an scc, so we must be able to
6221	 encode the test in a single bit that is one.  We do the above
6222	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
6223      if (GET_CODE (x) == LE || GET_CODE (x) == GE
6224	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6225	{
6226	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6227
6228	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6229		   base_bit + 2,
6230		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6231	}
6232
6233      else if (GET_CODE (x) == NE)
6234	{
6235	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6236
6237	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6238		   base_bit + 2, base_bit + 2);
6239	}
6240      return;
6241
6242    case 'E':
6243      /* X is a CR register.  Print the number of the EQ bit of the CR */
6244      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6245	output_operand_lossage ("invalid %%E value");
6246      else
6247	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6248      return;
6249
6250    case 'f':
6251      /* X is a CR register.  Print the shift count needed to move it
6252	 to the high-order four bits.  */
6253      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6254	output_operand_lossage ("invalid %%f value");
6255      else
6256	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6257      return;
6258
6259    case 'F':
6260      /* Similar, but print the count for the rotate in the opposite
6261	 direction.  */
6262      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6263	output_operand_lossage ("invalid %%F value");
6264      else
6265	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6266      return;
6267
6268    case 'G':
6269      /* X is a constant integer.  If it is negative, print "m",
6270	 otherwise print "z".  This is to make a aze or ame insn.  */
6271      if (GET_CODE (x) != CONST_INT)
6272	output_operand_lossage ("invalid %%G value");
6273      else if (INTVAL (x) >= 0)
6274	putc ('z', file);
6275      else
6276	putc ('m', file);
6277      return;
6278
6279    case 'h':
6280      /* If constant, output low-order five bits.  Otherwise, write
6281	 normally.  */
6282      if (INT_P (x))
6283	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6284      else
6285	print_operand (file, x, 0);
6286      return;
6287
6288    case 'H':
6289      /* If constant, output low-order six bits.  Otherwise, write
6290	 normally.  */
6291      if (INT_P (x))
6292	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6293      else
6294	print_operand (file, x, 0);
6295      return;
6296
6297    case 'I':
6298      /* Print `i' if this is a constant, else nothing.  */
6299      if (INT_P (x))
6300	putc ('i', file);
6301      return;
6302
6303    case 'j':
6304      /* Write the bit number in CCR for jump.  */
6305      i = ccr_bit (x, 0);
6306      if (i == -1)
6307	output_operand_lossage ("invalid %%j code");
6308      else
6309	fprintf (file, "%d", i);
6310      return;
6311
6312    case 'J':
6313      /* Similar, but add one for shift count in rlinm for scc and pass
6314	 scc flag to `ccr_bit'.  */
6315      i = ccr_bit (x, 1);
6316      if (i == -1)
6317	output_operand_lossage ("invalid %%J code");
6318      else
6319	/* If we want bit 31, write a shift count of zero, not 32.  */
6320	fprintf (file, "%d", i == 31 ? 0 : i + 1);
6321      return;
6322
6323    case 'k':
6324      /* X must be a constant.  Write the 1's complement of the
6325	 constant.  */
6326      if (! INT_P (x))
6327	output_operand_lossage ("invalid %%k value");
6328      else
6329	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6330      return;
6331
6332    case 'K':
6333      /* X must be a symbolic constant on ELF.  Write an
6334	 expression suitable for an 'addi' that adds in the low 16
6335	 bits of the MEM.  */
6336      if (GET_CODE (x) != CONST)
6337	{
6338	  print_operand_address (file, x);
6339	  fputs ("@l", file);
6340	}
6341      else
6342	{
6343	  if (GET_CODE (XEXP (x, 0)) != PLUS
6344	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6345		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6346	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6347	    output_operand_lossage ("invalid %%K value");
6348	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
6349	  fputs ("@l", file);
6350	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6351	}
6352      return;
6353
6354      /* %l is output_asm_label.  */
6355
6356    case 'L':
6357      /* Write second word of DImode or DFmode reference.  Works on register
6358	 or non-indexed memory only.  */
6359      if (GET_CODE (x) == REG)
6360	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6361      else if (GET_CODE (x) == MEM)
6362	{
6363	  /* Handle possible auto-increment.  Since it is pre-increment and
6364	     we have already done it, we can just use an offset of word.  */
6365	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6366	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6367	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6368					   UNITS_PER_WORD));
6369	  else
6370	    output_address (XEXP (adjust_address_nv (x, SImode,
6371						     UNITS_PER_WORD),
6372				  0));
6373
6374	  if (small_data_operand (x, GET_MODE (x)))
6375	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6376		     reg_names[SMALL_DATA_REG]);
6377	}
6378      return;
6379
6380    case 'm':
6381      /* MB value for a mask operand.  */
6382      if (! mask_operand (x, SImode))
6383	output_operand_lossage ("invalid %%m value");
6384
6385      val = INT_LOWPART (x);
6386
6387      /* If the high bit is set and the low bit is not, the value is zero.
6388	 If the high bit is zero, the value is the first 1 bit we find from
6389	 the left.  */
6390      if ((val & 0x80000000) && ((val & 1) == 0))
6391	{
6392	  putc ('0', file);
6393	  return;
6394	}
6395      else if ((val & 0x80000000) == 0)
6396	{
6397	  for (i = 1; i < 32; i++)
6398	    if ((val <<= 1) & 0x80000000)
6399	      break;
6400	  fprintf (file, "%d", i);
6401	  return;
6402	}
6403
6404      /* Otherwise, look for the first 0 bit from the right.  The result is its
6405	 number plus 1. We know the low-order bit is one.  */
6406      for (i = 0; i < 32; i++)
6407	if (((val >>= 1) & 1) == 0)
6408	  break;
6409
6410      /* If we ended in ...01, i would be 0.  The correct value is 31, so
6411	 we want 31 - i.  */
6412      fprintf (file, "%d", 31 - i);
6413      return;
6414
6415    case 'M':
6416      /* ME value for a mask operand.  */
6417      if (! mask_operand (x, SImode))
6418	output_operand_lossage ("invalid %%M value");
6419
6420      val = INT_LOWPART (x);
6421
6422      /* If the low bit is set and the high bit is not, the value is 31.
6423	 If the low bit is zero, the value is the first 1 bit we find from
6424	 the right.  */
6425      if ((val & 1) && ((val & 0x80000000) == 0))
6426	{
6427	  fputs ("31", file);
6428	  return;
6429	}
6430      else if ((val & 1) == 0)
6431	{
6432	  for (i = 0; i < 32; i++)
6433	    if ((val >>= 1) & 1)
6434	      break;
6435
6436	  /* If we had ....10, i would be 0.  The result should be
6437	     30, so we need 30 - i.  */
6438	  fprintf (file, "%d", 30 - i);
6439	  return;
6440	}
6441
6442      /* Otherwise, look for the first 0 bit from the left.  The result is its
6443	 number minus 1. We know the high-order bit is one.  */
6444      for (i = 0; i < 32; i++)
6445	if (((val <<= 1) & 0x80000000) == 0)
6446	  break;
6447
6448      fprintf (file, "%d", i);
6449      return;
6450
6451      /* %n outputs the negative of its operand.  */
6452
6453    case 'N':
6454      /* Write the number of elements in the vector times 4.  */
6455      if (GET_CODE (x) != PARALLEL)
6456	output_operand_lossage ("invalid %%N value");
6457      else
6458	fprintf (file, "%d", XVECLEN (x, 0) * 4);
6459      return;
6460
6461    case 'O':
6462      /* Similar, but subtract 1 first.  */
6463      if (GET_CODE (x) != PARALLEL)
6464	output_operand_lossage ("invalid %%O value");
6465      else
6466	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6467      return;
6468
6469    case 'p':
6470      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
6471      if (! INT_P (x)
6472	  || INT_LOWPART (x) < 0
6473	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
6474	output_operand_lossage ("invalid %%p value");
6475      else
6476	fprintf (file, "%d", i);
6477      return;
6478
6479    case 'P':
6480      /* The operand must be an indirect memory reference.  The result
6481	 is the register number.  */
6482      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6483	  || REGNO (XEXP (x, 0)) >= 32)
6484	output_operand_lossage ("invalid %%P value");
6485      else
6486	fprintf (file, "%d", REGNO (XEXP (x, 0)));
6487      return;
6488
6489    case 'q':
6490      /* This outputs the logical code corresponding to a boolean
6491	 expression.  The expression may have one or both operands
6492	 negated (if one, only the first one).  For condition register
6493         logical operations, it will also treat the negated
6494         CR codes as NOTs, but not handle NOTs of them.  */
6495      {
6496	const char *const *t = 0;
6497	const char *s;
6498	enum rtx_code code = GET_CODE (x);
6499	static const char * const tbl[3][3] = {
6500	  { "and", "andc", "nor" },
6501	  { "or", "orc", "nand" },
6502	  { "xor", "eqv", "xor" } };
6503
6504	if (code == AND)
6505	  t = tbl[0];
6506	else if (code == IOR)
6507	  t = tbl[1];
6508	else if (code == XOR)
6509	  t = tbl[2];
6510	else
6511	  output_operand_lossage ("invalid %%q value");
6512
6513	if (GET_CODE (XEXP (x, 0)) != NOT)
6514	  s = t[0];
6515	else
6516	  {
6517	    if (GET_CODE (XEXP (x, 1)) == NOT)
6518	      s = t[2];
6519	    else
6520	      s = t[1];
6521	  }
6522
6523	fputs (s, file);
6524      }
6525      return;
6526
6527    case 'R':
6528      /* X is a CR register.  Print the mask for `mtcrf'.  */
6529      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6530	output_operand_lossage ("invalid %%R value");
6531      else
6532	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6533      return;
6534
6535    case 's':
6536      /* Low 5 bits of 32 - value */
6537      if (! INT_P (x))
6538	output_operand_lossage ("invalid %%s value");
6539      else
6540	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6541      return;
6542
6543    case 'S':
6544      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
6545	 CONST_INT 32-bit mask is considered sign-extended so any
6546	 transition must occur within the CONST_INT, not on the boundary.  */
6547      if (! mask64_operand (x, DImode))
6548	output_operand_lossage ("invalid %%S value");
6549
6550      val = INT_LOWPART (x);
6551
6552      if (val & 1)      /* Clear Left */
6553	{
6554	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6555	    if (!((val >>= 1) & 1))
6556	      break;
6557
6558#if HOST_BITS_PER_WIDE_INT == 32
6559	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6560	    {
6561	      val = CONST_DOUBLE_HIGH (x);
6562
6563	      if (val == 0)
6564		--i;
6565	      else
6566		for (i = 32; i < 64; i++)
6567		  if (!((val >>= 1) & 1))
6568		    break;
6569	    }
6570#endif
6571	/* i = index of last set bit from right
6572	   mask begins at 63 - i from left */
6573	  if (i > 63)
6574	    output_operand_lossage ("%%S computed all 1's mask");
6575
6576	  fprintf (file, "%d", 63 - i);
6577	  return;
6578	}
6579      else	/* Clear Right */
6580	{
6581	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6582	    if ((val >>= 1) & 1)
6583	      break;
6584
6585#if HOST_BITS_PER_WIDE_INT == 32
6586	if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6587	  {
6588	    val = CONST_DOUBLE_HIGH (x);
6589
6590	    if (val == (HOST_WIDE_INT) -1)
6591	      --i;
6592	    else
6593	      for (i = 32; i < 64; i++)
6594		if ((val >>= 1) & 1)
6595		  break;
6596	  }
6597#endif
6598	/* i = index of last clear bit from right
6599	   mask ends at 62 - i from left */
6600	  if (i > 62)
6601	    output_operand_lossage ("%%S computed all 0's mask");
6602
6603	  fprintf (file, "%d", 62 - i);
6604	  return;
6605	}
6606
6607    case 'T':
6608      /* Print the symbolic name of a branch target register.  */
6609      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6610				  && REGNO (x) != COUNT_REGISTER_REGNUM))
6611	output_operand_lossage ("invalid %%T value");
6612      else if (REGNO (x) == LINK_REGISTER_REGNUM)
6613	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6614      else
6615	fputs ("ctr", file);
6616      return;
6617
6618    case 'u':
6619      /* High-order 16 bits of constant for use in unsigned operand.  */
6620      if (! INT_P (x))
6621	output_operand_lossage ("invalid %%u value");
6622      else
6623	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6624		 (INT_LOWPART (x) >> 16) & 0xffff);
6625      return;
6626
6627    case 'v':
6628      /* High-order 16 bits of constant for use in signed operand.  */
6629      if (! INT_P (x))
6630	output_operand_lossage ("invalid %%v value");
6631      else
6632	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6633		 (INT_LOWPART (x) >> 16) & 0xffff);
6634      return;
6635
6636    case 'U':
6637      /* Print `u' if this has an auto-increment or auto-decrement.  */
6638      if (GET_CODE (x) == MEM
6639	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
6640	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6641	putc ('u', file);
6642      return;
6643
6644    case 'V':
6645      /* Print the trap code for this operand.  */
6646      switch (GET_CODE (x))
6647	{
6648	case EQ:
6649	  fputs ("eq", file);   /* 4 */
6650	  break;
6651	case NE:
6652	  fputs ("ne", file);   /* 24 */
6653	  break;
6654	case LT:
6655	  fputs ("lt", file);   /* 16 */
6656	  break;
6657	case LE:
6658	  fputs ("le", file);   /* 20 */
6659	  break;
6660	case GT:
6661	  fputs ("gt", file);   /* 8 */
6662	  break;
6663	case GE:
6664	  fputs ("ge", file);   /* 12 */
6665	  break;
6666	case LTU:
6667	  fputs ("llt", file);  /* 2 */
6668	  break;
6669	case LEU:
6670	  fputs ("lle", file);  /* 6 */
6671	  break;
6672	case GTU:
6673	  fputs ("lgt", file);  /* 1 */
6674	  break;
6675	case GEU:
6676	  fputs ("lge", file);  /* 5 */
6677	  break;
6678	default:
6679	  abort ();
6680	}
6681      break;
6682
6683    case 'w':
6684      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
6685	 normally.  */
6686      if (INT_P (x))
6687	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6688		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6689      else
6690	print_operand (file, x, 0);
6691      return;
6692
6693    case 'W':
6694      /* MB value for a PowerPC64 rldic operand.  */
6695      val = (GET_CODE (x) == CONST_INT
6696	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6697
6698      if (val < 0)
6699	i = -1;
6700      else
6701	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6702	  if ((val <<= 1) < 0)
6703	    break;
6704
6705#if HOST_BITS_PER_WIDE_INT == 32
6706      if (GET_CODE (x) == CONST_INT && i >= 0)
6707	i += 32;  /* zero-extend high-part was all 0's */
6708      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6709	{
6710	  val = CONST_DOUBLE_LOW (x);
6711
6712	  if (val == 0)
6713	    abort ();
6714	  else if (val < 0)
6715	    --i;
6716	  else
6717	    for ( ; i < 64; i++)
6718	      if ((val <<= 1) < 0)
6719		break;
6720	}
6721#endif
6722
6723      fprintf (file, "%d", i + 1);
6724      return;
6725
6726    case 'X':
6727      if (GET_CODE (x) == MEM
6728	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6729	putc ('x', file);
6730      return;
6731
6732    case 'Y':
6733      /* Like 'L', for third word of TImode  */
6734      if (GET_CODE (x) == REG)
6735	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6736      else if (GET_CODE (x) == MEM)
6737	{
6738	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6739	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6740	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6741	  else
6742	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6743	  if (small_data_operand (x, GET_MODE (x)))
6744	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6745		     reg_names[SMALL_DATA_REG]);
6746	}
6747      return;
6748
6749    case 'z':
6750      /* X is a SYMBOL_REF.  Write out the name preceded by a
6751	 period and without any trailing data in brackets.  Used for function
6752	 names.  If we are configured for System V (or the embedded ABI) on
6753	 the PowerPC, do not emit the period, since those systems do not use
6754	 TOCs and the like.  */
6755      if (GET_CODE (x) != SYMBOL_REF)
6756	abort ();
6757
6758      if (XSTR (x, 0)[0] != '.')
6759	{
6760	  switch (DEFAULT_ABI)
6761	    {
6762	    default:
6763	      abort ();
6764
6765	    case ABI_AIX:
6766	      putc ('.', file);
6767	      break;
6768
6769	    case ABI_V4:
6770	    case ABI_AIX_NODESC:
6771	    case ABI_DARWIN:
6772	      break;
6773	    }
6774	}
6775#if TARGET_AIX
6776      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6777#else
6778      assemble_name (file, XSTR (x, 0));
6779#endif
6780      return;
6781
6782    case 'Z':
6783      /* Like 'L', for last word of TImode.  */
6784      if (GET_CODE (x) == REG)
6785	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6786      else if (GET_CODE (x) == MEM)
6787	{
6788	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
6789	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6790	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6791	  else
6792	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6793	  if (small_data_operand (x, GET_MODE (x)))
6794	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6795		     reg_names[SMALL_DATA_REG]);
6796	}
6797      return;
6798
6799      /* Print AltiVec memory operand.  */
6800    case 'y':
6801      {
6802	rtx tmp;
6803
6804	if (GET_CODE (x) != MEM)
6805	  abort ();
6806
6807	tmp = XEXP (x, 0);
6808
6809	if (GET_CODE (tmp) == REG)
6810	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6811	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6812	  {
6813	    if (REGNO (XEXP (tmp, 0)) == 0)
6814	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6815		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
6816	    else
6817	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6818		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
6819	  }
6820	else
6821	  abort ();
6822	break;
6823      }
6824
6825    case 0:
6826      if (GET_CODE (x) == REG)
6827	fprintf (file, "%s", reg_names[REGNO (x)]);
6828      else if (GET_CODE (x) == MEM)
6829	{
6830	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
6831	     know the width from the mode.  */
6832	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6833	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6834		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6835	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6836	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6837		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6838	  else
6839	    output_address (XEXP (x, 0));
6840	}
6841      else
6842	output_addr_const (file, x);
6843      return;
6844
6845    default:
6846      output_operand_lossage ("invalid %%xn code");
6847    }
6848}
6849
/* Print the address of an operand.  Handles, in order: a bare register
   (printed as "0(reg)"), a symbolic constant (with small-data relocation
   when applicable), an indexed reg+reg address, a reg+constant offset,
   a LO_SUM low-part address (ELF "@l" / Mach-O "lo16()"), and a TOC
   constant-pool address.  Aborts on any other form.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* A bare symbolic address is only valid for small data; with a
	 TOC it should have gone through the constant-pool path below.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed address.  If the first register is r0, emit it second:
	 on PowerPC, r0 in the base slot of an indexed address reads as
	 literal zero, so r0 must not be the base.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Base register plus constant displacement: "disp(reg)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Low 16 bits of a symbol: "sym@l(reg)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O spelling of the same low-part reference.  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* For ELF, also temporarily give the symbol an "@toc"
		 suffix; the alloca'd name lives only for this call.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  /* Undo both temporary mutations so the shared RTL and the
	     symbol name are restored for later uses.  */
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6939
/* Target hook for assembling integer objects.  The powerpc version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true if the value was emitted here, otherwise
   defers to default_assemble_integer.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard against re-entry: emitting the fixup record below goes
	 back through the assembler-output machinery, which could call
	 this hook again (NOTE(review): presumably via output_addr_const
	 or section switching — confirm).  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word being emitted, emit it as "(expr)@fixup",
	     then record the label's address in the .fixup section so
	     the runtime loader can relocate it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
7003
7004enum rtx_code
7005rs6000_reverse_condition (mode, code)
7006     enum machine_mode mode;
7007     enum rtx_code code;
7008{
7009  /* Reversal of FP compares takes care -- an ordered compare
7010     becomes an unordered compare and vice versa.  */
7011  if (mode == CCFPmode)
7012    return reverse_condition_maybe_unordered (code);
7013  else
7014    return reverse_condition (code);
7015}
7016
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands come from the
   globals rs6000_compare_op0/op1, with rs6000_compare_fp_p selecting
   a floating-point compare.  The returned rtx is CODE applied to the
   new condition register against zero.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: CCFP for float, CCUNS for unsigned integer
     compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	  || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);
  emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			  gen_rtx_COMPARE (comp_mode,
					   rs6000_compare_op0,
					   rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.
     These compound predicates (e.g. LE = LT-or-EQ once unordered
     results matter) are built by ORing two CR bits into a CCEQ
     register and then testing that for equality.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose the predicate into the two simple conditions
	 whose OR is equivalent.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default:  abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* From here on the caller tests the OR result for EQ.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
7081
7082
7083/* Emit the RTL for an sCOND pattern.  */
7084
7085void
7086rs6000_emit_sCOND (code, result)
7087     enum rtx_code code;
7088     rtx result;
7089{
7090  rtx condition_rtx;
7091  enum machine_mode op_mode;
7092
7093  condition_rtx = rs6000_generate_compare (code);
7094
7095  op_mode = GET_MODE (rs6000_compare_op0);
7096  if (op_mode == VOIDmode)
7097    op_mode = GET_MODE (rs6000_compare_op1);
7098
7099  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7100    {
7101      PUT_MODE (condition_rtx, DImode);
7102      convert_move (result, condition_rtx, 0);
7103    }
7104  else
7105    {
7106      PUT_MODE (condition_rtx, SImode);
7107      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7108    }
7109}
7110
7111/* Emit a branch of kind CODE to location LOC.  */
7112
7113void
7114rs6000_emit_cbranch (code, loc)
7115     enum rtx_code code;
7116     rtx loc;
7117{
7118  rtx condition_rtx, loc_ref;
7119
7120  condition_rtx = rs6000_generate_compare (code);
7121  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7122  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7123			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7124						     loc_ref, pc_rtx)));
7125}
7126
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is non-zero if we should reverse the sense of the comparison.

   INSN is the insn.

   The result points into a static buffer, so it is only valid until
   the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length attribute of 8 means the branch target is out of the
     conditional-branch displacement range: emit a reversed short
     branch around an unconditional "b".  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  /* Map the rtx comparison code to the assembler condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* For branches that are very close to 50%, assume not-taken.
	 The XOR with need_longbranch flips the hint because a long
	 branch inverts the condition actually emitted.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* Opcode: conditional return when LABEL is null, otherwise a
     conditional branch.  The {old|new} braces select between the POWER
     and PowerPC mnemonic spellings.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
7231
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.

   Implemented with the fsel instruction, which selects on "op0 >= 0";
   every supported comparison is rewritten into that form below.  */

int
rs6000_emit_cmove (dest, op, true_cond, false_cond)
     rtx dest;
     rtx op;
     rtx true_cond;
     rtx false_cond;
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match. */
  if (GET_MODE (op1) != compare_mode)
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow...  */
  /* If the comparison is an integer one, since we only have fsel
     it'll be cheaper to use a branch.  */
  if (! rs6000_compare_fp_p)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ)
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (! flag_unsafe_math_optimizations
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (flag_unsafe_math_optimizations)
    switch (code)
      {
      case GT:
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* op0 <= 0  <=>  -op0 >= 0 (modulo NaN, handled above).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* abs(op0) >= 0 exactly when op0 is not NaN.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* -abs(op0) >= 0 exactly when op0 == 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* UNGE needs two selects: a first fsel on op0 >= 0 picks
	 true_cond, and the final fsel on -op0 >= 0 picks between that
	 intermediate result and true_cond.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = temp;
      true_cond = false_cond;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* Mirror image of the UNGE case, with the roles of the true and
	 false arms exchanged.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = temp;
      false_cond = true_cond;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
7395
7396void
7397rs6000_emit_minmax (dest, code, op0, op1)
7398     rtx dest;
7399     enum rtx_code code;
7400     rtx op0;
7401     rtx op1;
7402{
7403  enum machine_mode mode = GET_MODE (op0);
7404  rtx target;
7405  if (code == SMAX || code == UMAX)
7406    target = emit_conditional_move (dest, GE, op0, op1, mode,
7407				    op0, op1, mode, 0);
7408  else
7409    target = emit_conditional_move (dest, GE, op0, op1, mode,
7410				    op1, op0, mode, 0);
7411  if (target == NULL_RTX)
7412    abort ();
7413  if (target != dest)
7414    emit_move_insn (dest, target);
7415}
7416
7417/* This page contains routines that are used to determine what the
7418   function prologue and epilogue code will do and write them out.  */
7419
/* Return the first fixed-point register that is required to be
   saved. 32 if none.  Profiling and the static chain may force
   additional registers to be saved beyond the first live callee-saved
   one.  */

int
first_reg_to_save ()
{
  int first_reg;

  /* Find lowest numbered live register.  Callee-saved GPRs are
     r13..r31; the PIC offset table register must be saved even though
     it is call-used when it holds the PIC base.  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (regs_ever_live[first_reg]
	&& (! call_used_regs[first_reg]
	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
      break;

  if (current_function_profile)
    {
      /* AIX must save/restore every register that contains a parameter
	 before/after the .__mcount call plus an additional register
	 for the static chain, if needed; use registers from 30 down to 22
	 to do this.  */
      if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
	{
	  int last_parm_reg, profile_first_reg;

	  /* Figure out last used parameter register.  The proper thing
	     to do is to walk incoming args of the function.  A function
	     might have live parameter registers even if it has no
	     incoming args.  */
	  for (last_parm_reg = 10;
	       last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
	       last_parm_reg--)
	    ;

	  /* Calculate first reg for saving parameter registers
	     and static chain.
	     Skip reg 31 which may contain the frame pointer.  */
	  profile_first_reg = (33 - last_parm_reg
			       - (current_function_needs_context ? 1 : 0));
#if TARGET_MACHO
          /* Need to skip another reg to account for R31 being PICBASE
             (when flag_pic is set) or R30 being used as the frame
             pointer (when flag_pic is not set).  */
          --profile_first_reg;
#endif
	  /* Do not save frame pointer if no parameters need to be saved.  */
	  if (profile_first_reg == 31)
	    profile_first_reg = 32;

	  if (first_reg > profile_first_reg)
	    first_reg = profile_first_reg;
	}

      /* SVR4 may need one register to preserve the static chain.  */
      else if (current_function_needs_context)
	{
	  /* Skip reg 31 which may contain the frame pointer.  */
	  if (first_reg > 30)
	    first_reg = 30;
	}
    }

#if TARGET_MACHO
  /* On Darwin, the PIC base register must be saved whenever the PIC
     offset table is in use.  */
  if (flag_pic && current_function_uses_pic_offset_table &&
      (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
    return RS6000_PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
7492
7493/* Similar, for FP regs.  */
7494
7495int
7496first_fp_reg_to_save ()
7497{
7498  int first_reg;
7499
7500  /* Find lowest numbered live register.  */
7501  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7502    if (regs_ever_live[first_reg])
7503      break;
7504
7505  return first_reg;
7506}
7507
7508/* Similar, for AltiVec regs.  */
7509
7510static int
7511first_altivec_reg_to_save ()
7512{
7513  int i;
7514
7515  /* Stack frame remains as is unless we are in AltiVec ABI.  */
7516  if (! TARGET_ALTIVEC_ABI)
7517    return LAST_ALTIVEC_REGNO + 1;
7518
7519  /* Find lowest numbered live register.  */
7520  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7521    if (regs_ever_live[i])
7522      break;
7523
7524  return i;
7525}
7526
7527/* Return a 32-bit mask of the AltiVec registers we need to set in
7528   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
7529   the 32-bit word is 0.  */
7530
static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No AltiVec registers in use at all -- VRSAVE need not be set.  */
  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): this loop also clears the bit for vregno itself;
     presumably cfun->args_info.vregno is the next unassigned vector
     argument register, making that extra clear harmless -- confirm
     against the CUMULATIVE_ARGS definition.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
7575
7576static void
7577is_altivec_return_reg (reg, xyes)
7578     rtx reg;
7579     void *xyes;
7580{
7581  bool *yes = (bool *) xyes;
7582  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7583    *yes = true;
7584}
7585
7586
7587/* Calculate the stack information for the current function.  This is
7588   complicated by having two separate calling sequences, the AIX calling
7589   sequence and the V.4 calling sequence.
7590
7591   AIX (and Darwin/Mac OS X) stack frames look like:
7592							  32-bit  64-bit
7593	SP---->	+---------------------------------------+
7594		| back chain to caller			| 0	  0
7595		+---------------------------------------+
7596		| saved CR				| 4       8 (8-11)
7597		+---------------------------------------+
7598		| saved LR				| 8       16
7599		+---------------------------------------+
7600		| reserved for compilers		| 12      24
7601		+---------------------------------------+
7602		| reserved for binders			| 16      32
7603		+---------------------------------------+
7604		| saved TOC pointer			| 20      40
7605		+---------------------------------------+
7606		| Parameter save area (P)		| 24      48
7607		+---------------------------------------+
7608		| Alloca space (A)			| 24+P    etc.
7609		+---------------------------------------+
7610		| Local variable space (L)		| 24+P+A
7611		+---------------------------------------+
7612		| Float/int conversion temporary (X)	| 24+P+A+L
7613		+---------------------------------------+
7614		| Save area for AltiVec registers (W)	| 24+P+A+L+X
7615		+---------------------------------------+
7616		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
7617		+---------------------------------------+
7618		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
7619		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
		+---------------------------------------+
7624	old SP->| back chain to caller's caller		|
7625		+---------------------------------------+
7626
7627   The required alignment for AIX configurations is two words (i.e., 8
7628   or 16 bytes).
7629
7630
7631   V.4 stack frames look like:
7632
7633	SP---->	+---------------------------------------+
7634		| back chain to caller			| 0
7635		+---------------------------------------+
7636		| caller's saved LR			| 4
7637		+---------------------------------------+
7638		| Parameter save area (P)		| 8
7639		+---------------------------------------+
7640		| Alloca space (A)			| 8+P
7641		+---------------------------------------+
7642		| Varargs save area (V)			| 8+P+A
7643		+---------------------------------------+
7644		| Local variable space (L)		| 8+P+A+V
7645		+---------------------------------------+
7646		| Float/int conversion temporary (X)	| 8+P+A+V+L
7647		+---------------------------------------+
7648		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
7649		+---------------------------------------+
7650		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
7651		+---------------------------------------+
7652		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
7653		+---------------------------------------+
7654		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
7655		+---------------------------------------+
7656		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
7657		+---------------------------------------+
7658		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
7659		+---------------------------------------+
7660	old SP->| back chain to caller's caller		|
7661		+---------------------------------------+
7662
7663   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7664   given.  (But note below and in sysv4.h that we require only 8 and
7665   may round up the size of our stack frame anyways.  The historical
7666   reason is early versions of powerpc-linux which didn't properly
7667   align the stack at program startup.  A happy side-effect is that
7668   -mno-eabi libraries can be used with -meabi programs.)
7669
7670   The EABI configuration defaults to the V.4 layout, unless
7671   -mcall-aix is used, in which case the AIX layout is used.  However,
7672   the stack alignment requirements may differ.  If -mno-eabi is not
7673   given, the required stack alignment is 8 bytes; if -mno-eabi is
7674   given, the required alignment is 16 bytes.  (But see V.4 comment
7675   above.)  */
7676
7677#ifndef ABI_STACK_BOUNDARY
7678#define ABI_STACK_BOUNDARY STACK_BOUNDARY
7679#endif
7680
7681rs6000_stack_t *
7682rs6000_stack_info ()
7683{
7684  static rs6000_stack_t info, zero_info;
7685  rs6000_stack_t *info_ptr = &info;
7686  int reg_size = TARGET_POWERPC64 ? 8 : 4;
7687  enum rs6000_abi abi;
7688  int ehrd_size;
7689  int total_raw_size;
7690
7691  /* Zero all fields portably.  */
7692  info = zero_info;
7693
7694  /* Select which calling sequence.  */
7695  info_ptr->abi = abi = DEFAULT_ABI;
7696
7697  /* Calculate which registers need to be saved & save area size.  */
7698  info_ptr->first_gp_reg_save = first_reg_to_save ();
7699  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7700     even if it currently looks like we won't.  */
7701  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7702       || (flag_pic == 1 && abi == ABI_V4)
7703       || (flag_pic && abi == ABI_DARWIN))
7704      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7705    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7706  else
7707    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7708
7709  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7710  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7711
7712  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7713  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7714				 - info_ptr->first_altivec_reg_save);
7715
7716  /* Does this function call anything?  */
7717  info_ptr->calls_p = (! current_function_is_leaf
7718		       || cfun->machine->ra_needs_full_frame);
7719
7720  /* Determine if we need to save the link register.  */
7721  if (rs6000_ra_ever_killed ()
7722      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7723#ifdef TARGET_RELOCATABLE
7724      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7725#endif
7726      || (info_ptr->first_fp_reg_save != 64
7727	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7728      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7729      || (abi == ABI_V4 && current_function_calls_alloca)
7730      || (DEFAULT_ABI == ABI_DARWIN
7731	  && flag_pic
7732	  && current_function_uses_pic_offset_table)
7733      || info_ptr->calls_p)
7734    {
7735      info_ptr->lr_save_p = 1;
7736      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7737    }
7738
7739  /* Determine if we need to save the condition code registers.  */
7740  if (regs_ever_live[CR2_REGNO]
7741      || regs_ever_live[CR3_REGNO]
7742      || regs_ever_live[CR4_REGNO])
7743    {
7744      info_ptr->cr_save_p = 1;
7745      if (abi == ABI_V4)
7746	info_ptr->cr_size = reg_size;
7747    }
7748
7749  /* If the current function calls __builtin_eh_return, then we need
7750     to allocate stack space for registers that will hold data for
7751     the exception handler.  */
7752  if (current_function_calls_eh_return)
7753    {
7754      unsigned int i;
7755      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7756	continue;
7757      ehrd_size = i * UNITS_PER_WORD;
7758    }
7759  else
7760    ehrd_size = 0;
7761
7762  /* Determine various sizes.  */
7763  info_ptr->reg_size     = reg_size;
7764  info_ptr->fixed_size   = RS6000_SAVE_AREA;
7765  info_ptr->varargs_size = RS6000_VARARGS_AREA;
7766  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
7767  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
7768					 8);
7769
7770  if (TARGET_ALTIVEC_ABI)
7771    {
7772      info_ptr->vrsave_mask = compute_vrsave_mask ();
7773      info_ptr->vrsave_size  = info_ptr->vrsave_mask ? 4 : 0;
7774    }
7775  else
7776    {
7777      info_ptr->vrsave_mask = 0;
7778      info_ptr->vrsave_size = 0;
7779    }
7780
7781  /* Calculate the offsets.  */
7782  switch (abi)
7783    {
7784    case ABI_NONE:
7785    default:
7786      abort ();
7787
7788    case ABI_AIX:
7789    case ABI_AIX_NODESC:
7790    case ABI_DARWIN:
7791      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7792      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7793
7794      if (TARGET_ALTIVEC_ABI)
7795	{
7796	  info_ptr->vrsave_save_offset
7797	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7798
7799	  /* Align stack so vector save area is on a quadword boundary.  */
7800	  if (info_ptr->altivec_size != 0)
7801	    info_ptr->altivec_padding_size
7802	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7803	  else
7804	    info_ptr->altivec_padding_size = 0;
7805
7806	  info_ptr->altivec_save_offset
7807	    = info_ptr->vrsave_save_offset
7808	    - info_ptr->altivec_padding_size
7809	    - info_ptr->altivec_size;
7810
7811	  /* Adjust for AltiVec case.  */
7812	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7813	}
7814      else
7815	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
7816      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
7817      info_ptr->lr_save_offset   = 2*reg_size;
7818      break;
7819
7820    case ABI_V4:
7821      info_ptr->fp_save_offset   = - info_ptr->fp_size;
7822      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
7823      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;
7824
7825      if (TARGET_ALTIVEC_ABI)
7826	{
7827	  info_ptr->vrsave_save_offset
7828	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7829
7830	  /* Align stack so vector save area is on a quadword boundary.  */
7831	  if (info_ptr->altivec_size != 0)
7832	    info_ptr->altivec_padding_size
7833	      = 16 - (-info_ptr->vrsave_save_offset % 16);
7834	  else
7835	    info_ptr->altivec_padding_size = 0;
7836
7837	  info_ptr->altivec_save_offset
7838	    = info_ptr->vrsave_save_offset
7839	    - info_ptr->altivec_padding_size
7840	    - info_ptr->altivec_size;
7841
7842	  /* Adjust for AltiVec case.  */
7843	  info_ptr->toc_save_offset
7844	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
7845	}
7846      else
7847	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
7848      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
7849      info_ptr->lr_save_offset   = reg_size;
7850      break;
7851    }
7852
7853  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
7854					 + info_ptr->gp_size
7855					 + info_ptr->altivec_size
7856					 + info_ptr->altivec_padding_size
7857					 + info_ptr->vrsave_size
7858					 + ehrd_size
7859					 + info_ptr->cr_size
7860					 + info_ptr->lr_size
7861					 + info_ptr->vrsave_size
7862					 + info_ptr->toc_size,
7863					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7864					 ? 16 : 8);
7865
7866  total_raw_size	 = (info_ptr->vars_size
7867			    + info_ptr->parm_size
7868			    + info_ptr->save_size
7869			    + info_ptr->varargs_size
7870			    + info_ptr->fixed_size);
7871
7872  info_ptr->total_size =
7873    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7874
7875  /* Determine if we need to allocate any stack frame:
7876
7877     For AIX we need to push the stack if a frame pointer is needed
7878     (because the stack might be dynamically adjusted), if we are
7879     debugging, if we make calls, or if the sum of fp_save, gp_save,
7880     and local variables are more than the space needed to save all
7881     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7882     + 18*8 = 288 (GPR13 reserved).
7883
7884     For V.4 we don't have the stack cushion that AIX uses, but assume
7885     that the debugger can handle stackless frames.  */
7886
7887  if (info_ptr->calls_p)
7888    info_ptr->push_p = 1;
7889
7890  else if (abi == ABI_V4)
7891    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7892
7893  else
7894    info_ptr->push_p = (frame_pointer_needed
7895			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7896			|| ((total_raw_size - info_ptr->fixed_size)
7897			    > (TARGET_32BIT ? 220 : 288)));
7898
7899  /* Zero offsets if we're not saving those registers.  */
7900  if (info_ptr->fp_size == 0)
7901    info_ptr->fp_save_offset = 0;
7902
7903  if (info_ptr->gp_size == 0)
7904    info_ptr->gp_save_offset = 0;
7905
7906  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7907    info_ptr->altivec_save_offset = 0;
7908
7909  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7910    info_ptr->vrsave_save_offset = 0;
7911
7912  if (! info_ptr->lr_save_p)
7913    info_ptr->lr_save_offset = 0;
7914
7915  if (! info_ptr->cr_save_p)
7916    info_ptr->cr_save_offset = 0;
7917
7918  if (! info_ptr->toc_save_p)
7919    info_ptr->toc_save_offset = 0;
7920
7921  return info_ptr;
7922}
7923
/* Dump the fields of INFO (or, when INFO is null, of the stack info
   freshly computed for the current function) to stderr.  Zero-valued
   fields are skipped.  Debugging aid only.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  /* 32 and 64 are the "nothing saved" sentinels for the GP and FP
     save ranges respectively.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Frame offsets (negative from the incoming stack pointer on
     AIX/Darwin; see the layout comment above rs6000_stack_info).  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Area sizes.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
8055
8056rtx
8057rs6000_return_addr (count, frame)
8058     int count;
8059     rtx frame;
8060{
8061  /* Currently we don't optimize very well between prolog and body
8062     code and for PIC code the code can be actually quite bad, so
8063     don't try to be too clever here.  */
8064  if (count != 0
8065      || flag_pic != 0
8066      || DEFAULT_ABI == ABI_AIX
8067      || DEFAULT_ABI == ABI_AIX_NODESC)
8068    {
8069      cfun->machine->ra_needs_full_frame = 1;
8070
8071      return
8072	gen_rtx_MEM
8073	  (Pmode,
8074	   memory_address
8075	   (Pmode,
8076	    plus_constant (copy_to_reg
8077			   (gen_rtx_MEM (Pmode,
8078					 memory_address (Pmode, frame))),
8079			   RETURN_ADDRESS_OFFSET)));
8080    }
8081
8082  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8083}
8084
8085static int
8086rs6000_ra_ever_killed ()
8087{
8088  rtx top;
8089
8090#ifdef ASM_OUTPUT_MI_THUNK
8091  if (current_function_is_thunk)
8092    return 0;
8093#endif
8094  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8095      || cfun->machine->ra_needs_full_frame)
8096    return regs_ever_live[LINK_REGISTER_REGNUM];
8097
8098  push_topmost_sequence ();
8099  top = get_insns ();
8100  pop_topmost_sequence ();
8101
8102  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8103			    top, NULL_RTX);
8104}
8105
8106/* Add a REG_MAYBE_DEAD note to the insn.  */
8107static void
8108rs6000_maybe_dead (insn)
8109     rtx insn;
8110{
8111  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8112					const0_rtx,
8113					REG_NOTES (insn));
8114}
8115
8116/* Emit instructions needed to load the TOC register.
8117   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8118   a constant pool; or for SVR4 -fpic.  */
8119
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  /* The value always lands in the PIC offset table (TOC) register.  */
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
    {
      /* SVR4 -fpic: a single load_toc_v4_pic_si insn produces the
	 value.  In the prologue LR is free to use as a scratch;
	 elsewhere a fresh pseudo is used.  */
      if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
	{
	  rtx temp = (fromprolog
		      ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		      : gen_reg_rtx (Pmode))
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
	  rs6000_maybe_dead (emit_move_insn (dest, temp));
	}
      /* -fPIC: compute the TOC address as an LR-relative base plus an
	 offset loaded from a constant word, via internal labels.  */
      else if (flag_pic == 2)
        {
	  char buf[30];
	  rtx tempLR = (fromprolog
			? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
			: gen_reg_rtx (Pmode));
	  rtx temp0 = (fromprolog
			? gen_rtx_REG (Pmode, 0)
			: gen_reg_rtx (Pmode));
	  rtx symF;

	  /* possibly create the toc section */
	  if (! toc_initialized)
	    {
	      toc_section ();
	      function_section (current_function_decl);
	    }

	  if (fromprolog)
	    {
	      rtx symL;

	      /* Emit the LCF/LCL label pair consumed by the
		 load_toc_v4_PIC_1 and _2 patterns.  */
	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	      symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
								   symF)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
								   symL,
								   symF)));
	    }
	  else
	    {
	      /* Outside the prologue: use a fresh LCG label and the
		 per-file toc_label symbol, loading the offset word
		 through DEST itself.  */
	      rtx tocsym;
	      static int reload_toc_labelno = 0;

	      tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								    symF,
								    tocsym)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_move_insn (temp0,
						 gen_rtx_MEM (Pmode, dest)));
	    }
	  /* dest = base (dest) + offset (temp0).  */
	  rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
	}
      else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
        {
	  /* This is for AIX code running in non-PIC ELF.  Load the
	     absolute TOC address in high/low halves.  */
	  char buf[30];
	  rtx realsym;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
	  realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
	  rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
	}
      else
        abort ();
    }
  else
    {
      /* AIX ABI: a single pattern (SI or DI flavor) reloads the TOC
	 register.  */
      if (TARGET_32BIT)
        rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
        rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
}
8213
/* Return the alias set used for TOC references, allocating it lazily
   on first use.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
8222
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
8226
8227int
8228uses_TOC ()
8229{
8230    rtx insn;
8231
8232    for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8233      if (INSN_P (insn))
8234	{
8235	  rtx pat = PATTERN (insn);
8236	  int i;
8237
8238	  if (GET_CODE (pat) == PARALLEL)
8239	    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8240	      if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8241		 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8242		  return 1;
8243	}
8244    return 0;
8245}
8246
8247rtx
8248create_TOC_reference (symbol)
8249    rtx symbol;
8250{
8251  return gen_rtx_PLUS (Pmode,
8252	   gen_rtx_REG (Pmode, TOC_REGISTER),
8253	     gen_rtx_CONST (Pmode,
8254	       gen_rtx_MINUS (Pmode, symbol,
8255		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8256}
8257
8258#if TARGET_AIX
8259/* __throw will restore its own return address to be the same as the
8260   return address of the function that the throw is being made to.
8261   This is unfortunate, because we want to check the original
8262   return address to see if we need to restore the TOC.
8263   So we have to squirrel it away here.
8264   This is used only in compiling __throw and __rethrow.
8265
8266   Most of this code should be removed by CSE.  */
8267static rtx insn_after_throw;
8268
/* This does the saving: capture into insn_after_throw the opcode of
   the instruction found at our caller's caller's return address.
   rs6000_emit_eh_toc_restore later compares it against the
   "restore TOC" opcode to decide whether r2 must be reloaded.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow our back chain word to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Two pointers above the back chain is the saved LR slot in the AIX
     layout (offset 8 or 16; see the frame diagram above); it holds
     the return address, i.e. the address of the insn after the call.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
8288
8289/* Emit insns to _restore_ the TOC register, at runtime (specifically
8290   in _eh.o).  Only used on AIX.
8291
8292   The idea is that on AIX, function calls look like this:
8293	bl  somefunction-trampoline
8294	lwz r2,20(sp)
8295
8296   and later,
8297	somefunction-trampoline:
8298	stw r2,20(sp)
8299	 ... load function address in the count register ...
8300	bctr
8301   or like this, if the linker determines that this is not a cross-module call
8302   and so the TOC need not be restored:
8303	bl  somefunction
8304	nop
8305   or like this, if the compiler could determine that this is not a
8306   cross-module call:
8307	bl  somefunction
8308   now, the tricky bit here is that register 2 is saved and restored
8309   by the _linker_, so we can't readily generate debugging information
8310   for it.  So we need to go back up the call chain looking at the
8311   insns at return addresses to see which calls saved the TOC register
8312   and so see where it gets restored from.
8313
8314   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8315   just before the actual epilogue.
8316
8317   On the bright side, this incurs no space or time overhead unless an
8318   exception is thrown, except for the extra code in libgcc.a.
8319
8320   The parameter STACKSIZE is a register containing (at runtime)
8321   the amount to be popped off the stack in addition to the stack frame
8322   of this routine (which will be __throw or __rethrow, and so is
8323   guaranteed to have a stack frame).  */
8324
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own back chain word.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* Walk frames until STACKSIZE bytes have been accounted for.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The TOC-restore opcode emitted after a cross-module call (see the
     comment above): lwz r2,20(r1) in 32-bit mode, ld r2,40(r1) in
     64-bit mode.  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* rs6000_aix_emit_builtin_unwind_init must have been run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn after this frame's call site is not the TOC-restore
     opcode, the call saved no TOC; skip the reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from the frame's TOC save slot, five pointers above the
     back chain (offset 20 or 40 in the AIX layout).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step to the next outer frame via the back chain...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the opcode at its saved return address (the saved
     LR slot is two pointers above the back chain).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8388#endif /* TARGET_AIX */
8389
8390/* This ties together stack memory (MEM with an alias set of
8391   rs6000_sr_alias_set) and the change to the stack pointer.  */
8392
8393static void
8394rs6000_emit_stack_tie ()
8395{
8396  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8397
8398  set_mem_alias_set (mem, rs6000_sr_alias_set);
8399  emit_insn (gen_stack_tie (mem));
8400}
8401
8402/* Emit the correct code for allocating stack space, as insns.
8403   If COPY_R12, make sure a copy of the old frame is left in r12.
8404   The generated code may use hard register 0 as a temporary.  */
8405
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit, emit a conditional trap that fires if the
     decremented stack pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* tmp_reg = limit + size; trap when sp < tmp_reg, i.e.
	     when sp - size would go below the limit register.  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Symbolic limit: materialize limit + size with a
	     high/low address pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 if the caller asked for it, or
     if we lack store-with-update and must store the back chain
     through r12 below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* A decrement too large for a 16-bit displacement must go
	 through a register (r0).  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update: decrement sp and store the back chain
	 word in a single instruction.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: decrement sp, then store the old sp (copied
	 to r12 above) as the back chain word.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Record the net effect (sp = sp - size) for the dwarf2 unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8491
8492/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8493   knows that:
8494
8495     (mem (plus (blah) (regXX)))
8496
8497   is really:
8498
8499     (mem (plus (blah) (const VALUE_OF_REGXX))).  */
8500
8501static void
8502altivec_frame_fixup (insn, reg, val)
8503     rtx insn, reg;
8504     HOST_WIDE_INT val;
8505{
8506  rtx real;
8507
8508  real = copy_rtx (PATTERN (insn));
8509
8510  real = replace_rtx (real, reg, GEN_INT (val));
8511
8512  RTX_FRAME_RELATED_P (insn) = 1;
8513  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8514					real,
8515					REG_NOTES (insn));
8516}
8517
8518/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8519   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8520   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
8521   deduce these equivalences by itself so it wasn't necessary to hold
8522   its hand so much.  */
8523
8524static void
8525rs6000_frame_related (insn, reg, val, reg2, rreg)
8526     rtx insn;
8527     rtx reg;
8528     HOST_WIDE_INT val;
8529     rtx reg2;
8530     rtx rreg;
8531{
8532  rtx real, temp;
8533
8534  real = copy_rtx (PATTERN (insn));
8535
8536  real = replace_rtx (real, reg,
8537		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8538							STACK_POINTER_REGNUM),
8539				    GEN_INT (val)));
8540
8541  /* We expect that 'real' is either a SET or a PARALLEL containing
8542     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
8543     are important so they all have to be marked RTX_FRAME_RELATED_P.  */
8544
8545  if (GET_CODE (real) == SET)
8546    {
8547      rtx set = real;
8548
8549      temp = simplify_rtx (SET_SRC (set));
8550      if (temp)
8551	SET_SRC (set) = temp;
8552      temp = simplify_rtx (SET_DEST (set));
8553      if (temp)
8554	SET_DEST (set) = temp;
8555      if (GET_CODE (SET_DEST (set)) == MEM)
8556	{
8557	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8558	  if (temp)
8559	    XEXP (SET_DEST (set), 0) = temp;
8560	}
8561    }
8562  else if (GET_CODE (real) == PARALLEL)
8563    {
8564      int i;
8565      for (i = 0; i < XVECLEN (real, 0); i++)
8566	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8567	  {
8568	    rtx set = XVECEXP (real, 0, i);
8569
8570	    temp = simplify_rtx (SET_SRC (set));
8571	    if (temp)
8572	      SET_SRC (set) = temp;
8573	    temp = simplify_rtx (SET_DEST (set));
8574	    if (temp)
8575	      SET_DEST (set) = temp;
8576	    if (GET_CODE (SET_DEST (set)) == MEM)
8577	      {
8578		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8579		if (temp)
8580		  XEXP (SET_DEST (set), 0) = temp;
8581	      }
8582	    RTX_FRAME_RELATED_P (set) = 1;
8583	  }
8584    }
8585  else
8586    abort ();
8587
8588  if (reg2 != NULL_RTX)
8589    real = replace_rtx (real, reg2, rreg);
8590
8591  RTX_FRAME_RELATED_P (insn) = 1;
8592  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8593					real,
8594					REG_NOTES (insn));
8595}
8596
8597/* Returns an insn that has a vrsave set operation with the
8598   appropriate CLOBBERs.  */
8599
8600static rtx
8601generate_set_vrsave (reg, info, epiloguep)
8602     rtx reg;
8603     rs6000_stack_t *info;
8604     int epiloguep;
8605{
8606  int nclobs, i;
8607  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8608  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8609
8610  clobs[0]
8611    = gen_rtx_SET (VOIDmode,
8612		   vrsave,
8613		   gen_rtx_UNSPEC_VOLATILE (SImode,
8614					    gen_rtvec (2, reg, vrsave),
8615					    30));
8616
8617  nclobs = 1;
8618
8619  /* We need to clobber the registers in the mask so the scheduler
8620     does not move sets to VRSAVE before sets of AltiVec registers.
8621
8622     However, if the function receives nonlocal gotos, reload will set
8623     all call saved registers live.  We will end up with:
8624
8625     	(set (reg 999) (mem))
8626	(parallel [ (set (reg vrsave) (unspec blah))
8627		    (clobber (reg 999))])
8628
8629     The clobber will cause the store into reg 999 to be dead, and
8630     flow will attempt to delete an epilogue insn.  In this case, we
8631     need an unspec use/set of the register.  */
8632
8633  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8634    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8635      {
8636	if (!epiloguep || call_used_regs [i])
8637	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8638					     gen_rtx_REG (V4SImode, i));
8639	else
8640	  {
8641	    rtx reg = gen_rtx_REG (V4SImode, i);
8642
8643	    clobs[nclobs++]
8644	      = gen_rtx_SET (VOIDmode,
8645			     reg,
8646			     gen_rtx_UNSPEC (V4SImode,
8647					     gen_rtvec (1, reg), 27));
8648	  }
8649      }
8650
8651  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8652
8653  for (i = 0; i < nclobs; ++i)
8654    XVECEXP (insn, 0, i) = clobs[i];
8655
8656  return insn;
8657}
8658
/* Emit function prologue as insns.

   Order of operations: for V.4 the stack is allocated first; AltiVec
   registers and VRSAVE are saved; LR is copied to r0 and (if needed)
   CR to r12; FPRs are saved inline or via an out-of-line _savefN
   routine; GPRs are saved individually or with a store-multiple;
   EH data registers, LR and CR are stored; for non-V.4 ABIs the
   stack is allocated last; finally the frame pointer and the
   TOC / PIC register are set up.  Saves are addressed through
   frame_reg_rtx (the stack pointer, or r12 holding the pre-prologue
   stack pointer), and each save gets a dwarf2 unwind note via
   rs6000_frame_related.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* For small frames, address the saves at an offset from the new
	 sp; for large frames, keep the old sp in r12 and address the
	 saves through it, since the offsets would not fit.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* The unwind note must show the constant offset, not the
	       scratch register holding it.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->fp_save_offset
					  + sp_offset
					  + 8 * i));
	    mem = gen_rtx_MEM (DFmode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: a PARALLEL that clobbers LR and refers to
	 the _savefN routine by name, with one SET per saved FPR so
	 the unwinder knows what the call stores.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p, dwarfp;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Individual stores: save each call-saved GPR that is live,
	 plus the PIC offset table register when the ABI requires it.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->gp_save_offset
					  + sp_offset
					  + reg_size * i));
	    mem = gen_rtx_MEM (reg_mode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, reg, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  reg = gen_rtx_REG (reg_mode, regno);
	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
  {
    /* If emit_load_toc_table will use the link register, we need to save
       it.  We use R11 for this purpose because emit_load_toc_table
       can use register 0.  This allows us to use a plain 'blr' to return
       from the procedure more often.  */
    int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				    && ! info->lr_save_p
				    && EXIT_BLOCK_PTR->pred != NULL);
    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, 11),
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

    rs6000_emit_load_toc_table (TRUE);

    if (save_LR_around_toc_setup)
      emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		      gen_rtx_REG (Pmode, 11));
  }

  /* Darwin PIC: load the picbase into LR and copy it to the PIC
     offset table register.  These may be marked dead downstream,
     hence rs6000_maybe_dead.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
9003
/* Write function prologue to FILE.  SIZE is unused.

   Emits the .extern directives for the out-of-line FPR save/restore
   routines and the AIX common-mode arithmetic routines.  If the
   target has no prologue expander (! HAVE_prologue), also generates
   the prologue RTL here and prints it through final ().  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Only emit the set of externs once per translation unit.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash. */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Advance the label counter used for PIC base labels.  */
  rs6000_pic_labelno++;
}
9066
9067/* Emit function epilogue as insns.
9068
9069   At present, dwarf2out_frame_debug_expr doesn't understand
9070   register restores, so we don't bother setting RTX_FRAME_RELATED_P
9071   anywhere in the epilogue.  Most of the insns below would in any case
9072   need special notes to explain where r11 is in relation to the stack.  */
9073
9074void
9075rs6000_emit_epilogue (sibcall)
9076     int sibcall;
9077{
9078  rs6000_stack_t *info;
9079  int restoring_FPRs_inline;
9080  int using_load_multiple;
9081  int using_mfcr_multiple;
9082  int use_backchain_to_restore_sp;
9083  int sp_offset = 0;
9084  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9085  rtx frame_reg_rtx = sp_reg_rtx;
9086  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9087  int reg_size = TARGET_POWERPC64 ? 8 : 4;
9088  int i;
9089
9090  info = rs6000_stack_info ();
9091  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9092			 && info->first_gp_reg_save < 31);
9093  restoring_FPRs_inline = (sibcall
9094			   || current_function_calls_eh_return
9095			   || info->first_fp_reg_save == 64
9096			   || FP_SAVE_INLINE (info->first_fp_reg_save));
9097  use_backchain_to_restore_sp = (frame_pointer_needed
9098				 || current_function_calls_alloca
9099				 || info->total_size > 32767);
9100  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9101			 || rs6000_cpu == PROCESSOR_PPC603
9102			 || rs6000_cpu == PROCESSOR_PPC750
9103			 || optimize_size);
9104
9105  /* If we have a frame pointer, a call to alloca,  or a large stack
9106     frame, restore the old stack pointer using the backchain.  Otherwise,
9107     we know what size to update it with.  */
9108  if (use_backchain_to_restore_sp)
9109    {
9110      /* Under V.4, don't reset the stack pointer until after we're done
9111	 loading the saved registers.  */
9112      if (DEFAULT_ABI == ABI_V4)
9113	frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9114
9115      emit_move_insn (frame_reg_rtx,
9116		      gen_rtx_MEM (Pmode, sp_reg_rtx));
9117
9118    }
9119  else if (info->push_p)
9120    {
9121      if (DEFAULT_ABI == ABI_V4)
9122	sp_offset = info->total_size;
9123      else
9124	{
9125	  emit_insn (TARGET_32BIT
9126		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9127				   GEN_INT (info->total_size))
9128		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9129				   GEN_INT (info->total_size)));
9130	}
9131    }
9132
9133  /* Restore AltiVec registers if needed.  */
9134  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9135    {
9136      int i;
9137
9138      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9139	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9140	  {
9141	    rtx addr, areg, mem;
9142
9143	    areg = gen_rtx_REG (Pmode, 0);
9144	    emit_move_insn
9145	      (areg, GEN_INT (info->altivec_save_offset
9146			      + sp_offset
9147			      + 16 * (i - info->first_altivec_reg_save)));
9148
9149	    /* AltiVec addressing mode is [reg+reg].  */
9150	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9151	    mem = gen_rtx_MEM (V4SImode, addr);
9152	    set_mem_alias_set (mem, rs6000_sr_alias_set);
9153
9154	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9155	  }
9156    }
9157
9158  /* Restore VRSAVE if needed.  */
9159  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9160    {
9161      rtx addr, mem, reg;
9162
9163      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9164			   GEN_INT (info->vrsave_save_offset + sp_offset));
9165      mem = gen_rtx_MEM (SImode, addr);
9166      set_mem_alias_set (mem, rs6000_sr_alias_set);
9167      reg = gen_rtx_REG (SImode, 12);
9168      emit_move_insn (reg, mem);
9169
9170      emit_insn (generate_set_vrsave (reg, info, 1));
9171    }
9172
9173  /* Get the old lr if we saved it.  */
9174  if (info->lr_save_p)
9175    {
9176      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9177			       GEN_INT (info->lr_save_offset + sp_offset));
9178      rtx mem = gen_rtx_MEM (Pmode, addr);
9179
9180      set_mem_alias_set (mem, rs6000_sr_alias_set);
9181
9182      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9183    }
9184
9185  /* Get the old cr if we saved it.  */
9186  if (info->cr_save_p)
9187    {
9188      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9189			       GEN_INT (info->cr_save_offset + sp_offset));
9190      rtx mem = gen_rtx_MEM (SImode, addr);
9191
9192      set_mem_alias_set (mem, rs6000_sr_alias_set);
9193
9194      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9195    }
9196
9197  /* Set LR here to try to overlap restores below.  */
9198  if (info->lr_save_p)
9199    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9200		    gen_rtx_REG (Pmode, 0));
9201
9202  /* Load exception handler data registers, if needed.  */
9203  if (current_function_calls_eh_return)
9204    {
9205      unsigned int i, regno;
9206
9207      for (i = 0; ; ++i)
9208	{
9209	  rtx addr, mem;
9210
9211	  regno = EH_RETURN_DATA_REGNO (i);
9212	  if (regno == INVALID_REGNUM)
9213	    break;
9214
9215	  addr = plus_constant (frame_reg_rtx,
9216				info->ehrd_offset + sp_offset
9217				+ reg_size * (int) i);
9218	  mem = gen_rtx_MEM (reg_mode, addr);
9219	  set_mem_alias_set (mem, rs6000_sr_alias_set);
9220
9221	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9222	}
9223    }
9224
9225  /* Restore GPRs.  This is done as a PARALLEL if we are using
9226     the load-multiple instructions.  */
9227  if (using_load_multiple)
9228    {
9229      rtvec p;
9230      p = rtvec_alloc (32 - info->first_gp_reg_save);
9231      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9232	{
9233	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9234				   GEN_INT (info->gp_save_offset
9235					    + sp_offset
9236					    + reg_size * i));
9237	  rtx mem = gen_rtx_MEM (reg_mode, addr);
9238
9239	  set_mem_alias_set (mem, rs6000_sr_alias_set);
9240
9241	  RTVEC_ELT (p, i) =
9242	    gen_rtx_SET (VOIDmode,
9243			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9244			 mem);
9245	}
9246      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9247    }
9248  else
9249    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9250      if ((regs_ever_live[info->first_gp_reg_save+i]
9251	   && ! call_used_regs[info->first_gp_reg_save+i])
9252	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9253	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9254		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9255	{
9256	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9257				   GEN_INT (info->gp_save_offset
9258					    + sp_offset
9259					    + reg_size * i));
9260	  rtx mem = gen_rtx_MEM (reg_mode, addr);
9261
9262	  set_mem_alias_set (mem, rs6000_sr_alias_set);
9263
9264	  emit_move_insn (gen_rtx_REG (reg_mode,
9265				       info->first_gp_reg_save + i),
9266			  mem);
9267	}
9268
9269  /* Restore fpr's if we need to do it without calling a function.  */
9270  if (restoring_FPRs_inline)
9271    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9272      if ((regs_ever_live[info->first_fp_reg_save+i]
9273	   && ! call_used_regs[info->first_fp_reg_save+i]))
9274	{
9275	  rtx addr, mem;
9276	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9277			       GEN_INT (info->fp_save_offset
9278					+ sp_offset
9279					+ 8 * i));
9280	  mem = gen_rtx_MEM (DFmode, addr);
9281	  set_mem_alias_set (mem, rs6000_sr_alias_set);
9282
9283	  emit_move_insn (gen_rtx_REG (DFmode,
9284				       info->first_fp_reg_save + i),
9285			  mem);
9286	}
9287
9288  /* If we saved cr, restore it here.  Just those that were used.  */
9289  if (info->cr_save_p)
9290    {
9291      rtx r12_rtx = gen_rtx_REG (SImode, 12);
9292      int count = 0;
9293
9294      if (using_mfcr_multiple)
9295	{
9296	  for (i = 0; i < 8; i++)
9297	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9298	      count++;
9299	  if (count == 0)
9300	    abort ();
9301	}
9302
9303      if (using_mfcr_multiple && count > 1)
9304	{
9305	  rtvec p;
9306	  int ndx;
9307
9308	  p = rtvec_alloc (count);
9309
9310	  ndx = 0;
9311	  for (i = 0; i < 8; i++)
9312	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9313	      {
9314		rtvec r = rtvec_alloc (2);
9315		RTVEC_ELT (r, 0) = r12_rtx;
9316		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9317		RTVEC_ELT (p, ndx) =
9318		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9319			       gen_rtx_UNSPEC (CCmode, r, 20));
9320		ndx++;
9321	      }
9322	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9323	  if (ndx != count)
9324	    abort ();
9325	}
9326      else
9327	for (i = 0; i < 8; i++)
9328	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9329	    {
9330	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9331							   CR0_REGNO+i),
9332					      r12_rtx));
9333	    }
9334    }
9335
9336  /* If this is V.4, unwind the stack pointer after all of the loads
9337     have been done.  We need to emit a block here so that sched
9338     doesn't decide to move the sp change before the register restores
9339     (which may not have any obvious dependency on the stack).  This
9340     doesn't hurt performance, because there is no scheduling that can
9341     be done after this point.  */
9342  if (DEFAULT_ABI == ABI_V4)
9343    {
9344      if (frame_reg_rtx != sp_reg_rtx)
9345	  rs6000_emit_stack_tie ();
9346
9347      if (use_backchain_to_restore_sp)
9348	{
9349	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9350	}
9351      else if (sp_offset != 0)
9352	{
9353	  emit_insn (Pmode == SImode
9354		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9355				   GEN_INT (sp_offset))
9356		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9357				   GEN_INT (sp_offset)));
9358	}
9359    }
9360
9361  if (current_function_calls_eh_return)
9362    {
9363      rtx sa = EH_RETURN_STACKADJ_RTX;
9364      emit_insn (Pmode == SImode
9365		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9366		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9367    }
9368
9369  if (!sibcall)
9370    {
9371      rtvec p;
9372      if (! restoring_FPRs_inline)
9373	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9374      else
9375	p = rtvec_alloc (2);
9376
9377      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9378      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9379				      gen_rtx_REG (Pmode,
9380						   LINK_REGISTER_REGNUM));
9381
9382      /* If we have to restore more than two FP registers, branch to the
9383	 restore function.  It will return to our caller.  */
9384      if (! restoring_FPRs_inline)
9385	{
9386	  int i;
9387	  char rname[30];
9388	  const char *alloc_rname;
9389
9390	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9391		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9392	  alloc_rname = ggc_strdup (rname);
9393	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9394					  gen_rtx_SYMBOL_REF (Pmode,
9395							      alloc_rname));
9396
9397	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9398	    {
9399	      rtx addr, mem;
9400	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9401				   GEN_INT (info->fp_save_offset + 8*i));
9402	      mem = gen_rtx_MEM (DFmode, addr);
9403	      set_mem_alias_set (mem, rs6000_sr_alias_set);
9404
9405	      RTVEC_ELT (p, i+3) =
9406		gen_rtx_SET (VOIDmode,
9407			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9408			     mem);
9409	    }
9410	}
9411
9412      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9413    }
9414}
9415
/* Write function epilogue to FILE.  SIZE is the frame size; it is
   unused here.  If there is no RTL epilogue pattern (! HAVE_epilogue),
   generate the epilogue insns in a sequence and print them with
   final ().  Then, for AIX, emit the traceback table that follows the
   function body (see /usr/include/sys/debug.h for the layout).  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* The optional, variable-length traceback-table fields are omitted
     when optimizing for size or when targeting ELF.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash. */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyway, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
         although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only register parameters are recorded; stack parameters
		 are skipped.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* Two-bit codes: 10 = single float, 11 = double
			 float (emitted below into parm_info).  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      /* Fixed-point parameters consume one bit per word
			 they occupy.  */
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
  return;
}
9678
9679/* A C compound statement that outputs the assembler code for a thunk
9680   function, used to implement C++ virtual function calls with
9681   multiple inheritance.  The thunk acts as a wrapper around a virtual
9682   function, adjusting the implicit object parameter before handing
9683   control off to the real function.
9684
9685   First, emit code to add the integer DELTA to the location that
9686   contains the incoming first argument.  Assume that this argument
9687   contains a pointer, and is the one used to pass the `this' pointer
9688   in C++.  This is the incoming argument *before* the function
9689   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
9690   values of all other incoming arguments.
9691
9692   After the addition, emit code to jump to FUNCTION, which is a
9693   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
9694   not touch the return address.  Hence returning from FUNCTION will
9695   return to whoever called the current `thunk'.
9696
9697   The effect must be as if FUNCTION had been called directly with the
9698   adjusted first argument.  This macro is responsible for emitting
9699   all of the code for a thunk function; output_function_prologue()
9700   and output_function_epilogue() are not invoked.
9701
9702   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
9703   been extracted from it.)  It might possibly be useful on some
9704   targets, but probably not.
9705
9706   If you do not define this macro, the target-independent code in the
9707   C++ frontend will generate a less efficient heavyweight thunk that
9708   calls FUNCTION instead of jumping to it.  The generic approach does
9709   not support varargs.  */
9710
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* Register holding the incoming `this' pointer: r4 when FUNCTION
     returns an aggregate in memory (r3 then carries the return-value
     address), otherwise r3.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];	/* TOC pointer */
  const char *schain	 = reg_names[11];	/* static chain */
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;	/* makes each generated TOC label unique */

  /* First, emit code adding DELTA to the `this' register.  */
  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_high = delta >> 16;
      int delta_low  = delta & 0xffff;
      if ((delta_low & 0x8000) != 0)
	{
	  delta_high++;
	  delta_low = (delta_low ^ 0x8000) - 0x8000;	/* sign extend */
	}

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* 64-bit constants, fixme */
  else
    abort ();

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      /* AIX function entry points are prefixed with a dot.  */
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  text_section ();
	  /* Load the function's descriptor address from the TOC into
	     r12 (with -mminimal-toc, first load the real TOC base).  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* Pick apart the descriptor: entry address into r0, callee's
	     TOC pointer into r2 and static chain into r11, then jump
	     through the count register.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  /* Under PIC, undefined names must be reached via their
	     Mach-O stub.  */
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9871
9872
9873/* A quick summary of the various types of 'constant-pool tables'
9874   under PowerPC:
9875
9876   Target	Flags		Name		One table per
9877   AIX		(none)		AIX TOC		object file
9878   AIX		-mfull-toc	AIX TOC		object file
9879   AIX		-mminimal-toc	AIX minimal TOC	translation unit
9880   SVR4/EABI	(none)		SVR4 SDATA	object file
9881   SVR4/EABI	-fpic		SVR4 pic	object file
9882   SVR4/EABI	-fPIC		SVR4 PIC	translation unit
9883   SVR4/EABI	-mrelocatable	EABI TOC	function
9884   SVR4/EABI	-maix		AIX TOC		object file
9885   SVR4/EABI	-maix -mminimal-toc
9886				AIX minimal TOC	translation unit
9887
9888   Name			Reg.	Set by	entries	      contains:
9889					made by	 addrs?	fp?	sum?
9890
9891   AIX TOC		2	crt0	as	 Y	option	option
9892   AIX minimal TOC	30	prolog	gcc	 Y	Y	option
9893   SVR4 SDATA		13	crt0	gcc	 N	Y	N
9894   SVR4 pic		30	prolog	ld	 Y	not yet	N
9895   SVR4 PIC		30	prolog	gcc	 Y	option	option
9896   EABI TOC		30	prolog	gcc	 Y	option	option
9897
9898*/
9899
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant was requested in; entries with equal keys but
     different modes are distinct (see toc_hash_eq).  */
  enum machine_mode key_mode;
  /* Number of the internal `LC' label for this entry's TOC slot;
     duplicates are emitted as `.set' aliases of this label.  */
  int labelno;
};

/* Table of the TOC entries emitted so far, used to suppress
   duplicate entries (see output_toc).  */
static htab_t toc_hash_table;
9912
9913/* Hash functions for the hash table.  */
9914
9915static unsigned
9916rs6000_hash_constant (k)
9917     rtx k;
9918{
9919  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9920  const char *format = GET_RTX_FORMAT (GET_CODE (k));
9921  int flen = strlen (format);
9922  int fidx;
9923
9924  if (GET_CODE (k) == LABEL_REF)
9925    return result * 1231 + X0INT (XEXP (k, 0), 3);
9926
9927  if (GET_CODE (k) == CONST_DOUBLE)
9928    fidx = 1;
9929  else if (GET_CODE (k) == CODE_LABEL)
9930    fidx = 3;
9931  else
9932    fidx = 0;
9933
9934  for (; fidx < flen; fidx++)
9935    switch (format[fidx])
9936      {
9937      case 's':
9938	{
9939	  unsigned i, len;
9940	  const char *str = XSTR (k, fidx);
9941	  len = strlen (str);
9942	  result = result * 613 + len;
9943	  for (i = 0; i < len; i++)
9944	    result = result * 613 + (unsigned) str[i];
9945	  break;
9946	}
9947      case 'u':
9948      case 'e':
9949	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9950	break;
9951      case 'i':
9952      case 'n':
9953	result = result * 613 + (unsigned) XINT (k, fidx);
9954	break;
9955      case 'w':
9956	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9957	  result = result * 613 + (unsigned) XWINT (k, fidx);
9958	else
9959	  {
9960	    size_t i;
9961	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9962	      result = result * 613 + (unsigned) (XWINT (k, fidx)
9963						  >> CHAR_BIT * i);
9964	  }
9965	break;
9966      default:
9967	abort ();
9968      }
9969  return result;
9970}
9971
9972static unsigned
9973toc_hash_function (hash_entry)
9974     const void * hash_entry;
9975{
9976  const struct toc_hash_struct *thc =
9977    (const struct toc_hash_struct *) hash_entry;
9978  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9979}
9980
9981/* Compare H1 and H2 for equivalence.  */
9982
9983static int
9984toc_hash_eq (h1, h2)
9985     const void * h1;
9986     const void * h2;
9987{
9988  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9989  rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9990
9991  if (((const struct toc_hash_struct *) h1)->key_mode
9992      != ((const struct toc_hash_struct *) h2)->key_mode)
9993    return 0;
9994
9995  /* Gotcha:  One of these const_doubles will be in memory.
9996     The other may be on the constant-pool chain.
9997     So rtx_equal_p will think they are different...  */
9998  if (r1 == r2)
9999    return 1;
10000  if (GET_CODE (r1) != GET_CODE (r2)
10001      || GET_MODE (r1) != GET_MODE (r2))
10002    return 0;
10003  if (GET_CODE (r1) == CONST_DOUBLE)
10004    {
10005      int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
10006      int i;
10007      for (i = 1; i < format_len; i++)
10008	if (XWINT (r1, i) != XWINT (r2, i))
10009	  return 0;
10010
10011      return 1;
10012    }
10013  else if (GET_CODE (r1) == LABEL_REF)
10014    return (CODE_LABEL_NUMBER (XEXP (r1, 0))
10015	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
10016  else
10017    return rtx_equal_p (r1, r2);
10018}
10019
10020/* Mark the hash table-entry HASH_ENTRY.  */
10021
10022static int
10023toc_hash_mark_entry (hash_slot, unused)
10024     void ** hash_slot;
10025     void * unused ATTRIBUTE_UNUSED;
10026{
10027  const struct toc_hash_struct * hash_entry =
10028    *(const struct toc_hash_struct **) hash_slot;
10029  rtx r = hash_entry->key;
10030  ggc_set_mark (hash_entry);
10031  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
10032  if (GET_CODE (r) == LABEL_REF)
10033    {
10034      ggc_set_mark (r);
10035      ggc_set_mark (XEXP (r, 0));
10036    }
10037  else
10038    ggc_mark_rtx (r);
10039  return 1;
10040}
10041
10042/* Mark all the elements of the TOC hash-table *HT.  */
10043
10044static void
10045toc_hash_mark_table (vht)
10046     void *vht;
10047{
10048  htab_t *ht = vht;
10049
10050  htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10051}
10052
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Nonzero if the NUL-terminated string NAME carries one of the
   prefixes the C++ front-end uses for vtables and vtable-like
   objects.  NAME may be evaluated more than once, so pass only a
   side-effect-free expression.  (The previous expansion referenced a
   variable `name' instead of the macro parameter, so it only worked
   when the caller happened to have a local of that exact name.)  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
10063
10064void
10065rs6000_output_symbol_ref (file, x)
10066     FILE *file;
10067     rtx x;
10068{
10069  /* Currently C++ toc references to vtables can be emitted before it
10070     is decided whether the vtable is public or private.  If this is
10071     the case, then the linker will eventually complain that there is
10072     a reference to an unknown section.  Thus, for vtables only,
10073     we emit the TOC reference to reference the symbol and not the
10074     section.  */
10075  const char *name = XSTR (x, 0);
10076
10077  if (VTABLE_NAME_P (name))
10078    {
10079      RS6000_OUTPUT_BASENAME (file, name);
10080    }
10081  else
10082    assemble_name (file, name);
10083}
10084
10085/* Output a TOC entry.  We derive the entry name from what is being
10086   written.  */
10087
10088void
10089output_toc (file, x, labelno, mode)
10090     FILE *file;
10091     rtx x;
10092     int labelno;
10093     enum machine_mode mode;
10094{
10095  char buf[256];
10096  const char *name = buf;
10097  const char *real_name;
10098  rtx base = x;
10099  int offset = 0;
10100
10101  if (TARGET_NO_TOC)
10102    abort ();
10103
10104  /* When the linker won't eliminate them, don't output duplicate
10105     TOC entries (this happens on AIX if there is any kind of TOC,
10106     and on SVR4 under -fPIC or -mrelocatable).  */
10107  if (TARGET_TOC)
10108    {
10109      struct toc_hash_struct *h;
10110      void * * found;
10111
10112      h = ggc_alloc (sizeof (*h));
10113      h->key = x;
10114      h->key_mode = mode;
10115      h->labelno = labelno;
10116
10117      found = htab_find_slot (toc_hash_table, h, 1);
10118      if (*found == NULL)
10119	*found = h;
10120      else  /* This is indeed a duplicate.
10121	       Set this label equal to that label.  */
10122	{
10123	  fputs ("\t.set ", file);
10124	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10125	  fprintf (file, "%d,", labelno);
10126	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10127	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10128					      found)->labelno));
10129	  return;
10130	}
10131    }
10132
10133  /* If we're going to put a double constant in the TOC, make sure it's
10134     aligned properly when strict alignment is on.  */
10135  if (GET_CODE (x) == CONST_DOUBLE
10136      && STRICT_ALIGNMENT
10137      && GET_MODE_BITSIZE (mode) >= 64
10138      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10139    ASM_OUTPUT_ALIGN (file, 3);
10140  }
10141
10142  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10143
10144  /* Handle FP constants specially.  Note that if we have a minimal
10145     TOC, things we put here aren't actually in the TOC, so we can allow
10146     FP constants.  */
10147  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10148    {
10149      REAL_VALUE_TYPE rv;
10150      long k[2];
10151
10152      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10153      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10154
10155      if (TARGET_64BIT)
10156	{
10157	  if (TARGET_MINIMAL_TOC)
10158	    fputs (DOUBLE_INT_ASM_OP, file);
10159	  else
10160	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10161	  fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10162	  return;
10163	}
10164      else
10165	{
10166	  if (TARGET_MINIMAL_TOC)
10167	    fputs ("\t.long ", file);
10168	  else
10169	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10170	  fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10171	  return;
10172	}
10173    }
10174  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10175    {
10176      REAL_VALUE_TYPE rv;
10177      long l;
10178
10179      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10180      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10181
10182      if (TARGET_64BIT)
10183	{
10184	  if (TARGET_MINIMAL_TOC)
10185	    fputs (DOUBLE_INT_ASM_OP, file);
10186	  else
10187	    fprintf (file, "\t.tc FS_%lx[TC],", l);
10188	  fprintf (file, "0x%lx00000000\n", l);
10189	  return;
10190	}
10191      else
10192	{
10193	  if (TARGET_MINIMAL_TOC)
10194	    fputs ("\t.long ", file);
10195	  else
10196	    fprintf (file, "\t.tc FS_%lx[TC],", l);
10197	  fprintf (file, "0x%lx\n", l);
10198	  return;
10199	}
10200    }
10201  else if (GET_MODE (x) == VOIDmode
10202	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10203    {
10204      unsigned HOST_WIDE_INT low;
10205      HOST_WIDE_INT high;
10206
10207      if (GET_CODE (x) == CONST_DOUBLE)
10208	{
10209	  low = CONST_DOUBLE_LOW (x);
10210	  high = CONST_DOUBLE_HIGH (x);
10211	}
10212      else
10213#if HOST_BITS_PER_WIDE_INT == 32
10214	{
10215	  low = INTVAL (x);
10216	  high = (low & 0x80000000) ? ~0 : 0;
10217	}
10218#else
10219	{
10220          low = INTVAL (x) & 0xffffffff;
10221          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10222	}
10223#endif
10224
10225      /* TOC entries are always Pmode-sized, but since this
10226	 is a bigendian machine then if we're putting smaller
10227	 integer constants in the TOC we have to pad them.
10228	 (This is still a win over putting the constants in
10229	 a separate constant pool, because then we'd have
10230	 to have both a TOC entry _and_ the actual constant.)
10231
10232	 For a 32-bit target, CONST_INT values are loaded and shifted
10233	 entirely within `low' and can be stored in one TOC entry.  */
10234
10235      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10236	abort ();/* It would be easy to make this work, but it doesn't now.  */
10237
10238      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10239	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10240		       POINTER_SIZE, &low, &high, 0);
10241
10242      if (TARGET_64BIT)
10243	{
10244	  if (TARGET_MINIMAL_TOC)
10245	    fputs (DOUBLE_INT_ASM_OP, file);
10246	  else
10247	    fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10248	  fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10249	  return;
10250	}
10251      else
10252	{
10253	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10254	    {
10255	      if (TARGET_MINIMAL_TOC)
10256		fputs ("\t.long ", file);
10257	      else
10258		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10259			 (long) high, (long) low);
10260	      fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10261	    }
10262	  else
10263	    {
10264	      if (TARGET_MINIMAL_TOC)
10265		fputs ("\t.long ", file);
10266	      else
10267		fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10268	      fprintf (file, "0x%lx\n", (long) low);
10269	    }
10270	  return;
10271	}
10272    }
10273
10274  if (GET_CODE (x) == CONST)
10275    {
10276      if (GET_CODE (XEXP (x, 0)) != PLUS)
10277	abort ();
10278
10279      base = XEXP (XEXP (x, 0), 0);
10280      offset = INTVAL (XEXP (XEXP (x, 0), 1));
10281    }
10282
10283  if (GET_CODE (base) == SYMBOL_REF)
10284    name = XSTR (base, 0);
10285  else if (GET_CODE (base) == LABEL_REF)
10286    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10287  else if (GET_CODE (base) == CODE_LABEL)
10288    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10289  else
10290    abort ();
10291
10292  STRIP_NAME_ENCODING (real_name, name);
10293  if (TARGET_MINIMAL_TOC)
10294    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10295  else
10296    {
10297      fprintf (file, "\t.tc %s", real_name);
10298
10299      if (offset < 0)
10300	fprintf (file, ".N%d", - offset);
10301      else if (offset)
10302	fprintf (file, ".P%d", offset);
10303
10304      fputs ("[TC],", file);
10305    }
10306
10307  /* Currently C++ toc references to vtables can be emitted before it
10308     is decided whether the vtable is public or private.  If this is
10309     the case, then the linker will eventually complain that there is
10310     a TOC reference to an unknown section.  Thus, for vtables only,
10311     we emit the TOC reference to reference the symbol and not the
10312     section.  */
10313  if (VTABLE_NAME_P (name))
10314    {
10315      RS6000_OUTPUT_BASENAME (file, name);
10316      if (offset < 0)
10317	fprintf (file, "%d", offset);
10318      else if (offset > 0)
10319	fprintf (file, "+%d", offset);
10320    }
10321  else
10322    output_addr_const (file, x);
10323  putc ('\n', file);
10324}
10325
10326/* Output an assembler pseudo-op to write an ASCII string of N characters
10327   starting at P to FILE.
10328
10329   On the RS/6000, we have to do this using the .byte operation and
10330   write out special characters outside the quoted string.
10331   Also, the assembler is broken; very long strings are truncated,
10332   so we must artificially break them up early.  */
10333
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char ch;
  int idx, run_len;
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;

  run_len = 0;
  for (idx = 0; idx < n; idx++)
    {
      ch = *p++;
      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable character: start a quoted string if one is not
	     already open.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* A quote inside a quoted string is escaped by doubling.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++run_len;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++run_len;

	  /* The assembler truncates very long strings, so break up
	     quoted runs well before that happens.  */
	  if (run_len >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      run_len = 0;
	    }
	}
      else
	{
	  /* Unprintable character: emit it as a decimal byte value,
	     closing any open quoted string first.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  run_len = 0;
	}
    }

  /* Close any still-open quoted string, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
10395
10396/* Generate a unique section name for FILENAME for a section type
10397   represented by SECTION_DESC.  Output goes into BUF.
10398
10399   SECTION_DESC can be any string, as long as it is different for each
10400   possible section type.
10401
10402   We name the section in the same manner as xlc.  The name begins with an
10403   underscore followed by the filename (after stripping any leading directory
10404   names) with the last period replaced by the string SECTION_DESC.  If
10405   FILENAME does not contain a period, SECTION_DESC is appended to the end of
10406   the name.  */
10407
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base_name = filename;	/* filename minus directories */
  const char *dot = 0;			/* last '.' anywhere in FILENAME */
  char *out;
  int size;

  /* Locate the start of the base name and the last period.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base_name = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  size = strlen (base_name) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (size);

  out = *buf;
  *out++ = '_';

  /* Copy the alphanumeric characters of the base name, substituting
     SECTION_DESC for the last period.  */
  for (scan = base_name; *scan; scan++)
    {
      if (scan == dot)
        {
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
        }
      else if (ISALNUM (*scan))
        *out++ = *scan;
    }

  /* If no period was seen, SECTION_DESC goes on the end instead.  */
  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
10450
10451/* Emit profile function.  */
10452
void
output_profile_hook (labelno)
     int labelno;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: call mcount, passing the address of the internal label
	 "LP<labelno>" generated below.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      /* By default mcount receives the caller's address from the
	 link register.  */
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* Call mcount with the caller's address as the argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10493
10494/* Write function profiler code.  */
10495
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];

  /* BUF holds the internal label "LP<labelno>" whose address is
     handed to _mcount.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
    case ABI_AIX_NODESC:
      /* Save the link register, load the label's address into r0,
	 and call _mcount.  The address computation depends on the
	 PIC level.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: fetch the label's address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label's absolute address from its
	     high-adjusted and low halves.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* Preserve the static chain register around the _mcount call
	 by parking it in r30.  */
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;

    }
}
10564
10565/* Adjust the cost of a scheduling dependency.  Return the new cost of
10566   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
10567
10568static int
10569rs6000_adjust_cost (insn, link, dep_insn, cost)
10570     rtx insn;
10571     rtx link;
10572     rtx dep_insn ATTRIBUTE_UNUSED;
10573     int cost;
10574{
10575  if (! recog_memoized (insn))
10576    return 0;
10577
10578  if (REG_NOTE_KIND (link) != 0)
10579    return 0;
10580
10581  if (REG_NOTE_KIND (link) == 0)
10582    {
10583      /* Data dependency; DEP_INSN writes a register that INSN reads
10584	 some cycles later.  */
10585      switch (get_attr_type (insn))
10586	{
10587	case TYPE_JMPREG:
10588          /* Tell the first scheduling pass about the latency between
10589	     a mtctr and bctr (and mtlr and br/blr).  The first
10590	     scheduling pass will not know about this latency since
10591	     the mtctr instruction, which has the latency associated
10592	     to it, will be generated by reload.  */
10593          return TARGET_POWER ? 5 : 4;
10594	case TYPE_BRANCH:
10595	  /* Leave some extra cycles between a compare and its
10596	     dependent branch, to inhibit expensive mispredicts.  */
10597	  if ((rs6000_cpu_attr == CPU_PPC750
10598               || rs6000_cpu_attr == CPU_PPC7400
10599               || rs6000_cpu_attr == CPU_PPC7450)
10600	      && recog_memoized (dep_insn)
10601	      && (INSN_CODE (dep_insn) >= 0)
10602	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10603		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10604		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10605		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10606	    return cost + 2;
10607	default:
10608	  break;
10609	}
10610      /* Fall out to return default cost.  */
10611    }
10612
10613  return cost;
10614}
10615
10616/* A C statement (sans semicolon) to update the integer scheduling
10617   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
10618   INSN earlier, increase the priority to execute INSN later.  Do not
10619   define this macro if you do not need to adjust the scheduling
10620   priorities of insns.  */
10621
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  /* Disabled experiment implementing the above for the PPC750; it
     also traces each adjustment to stderr.  */
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* Currently no adjustment is made; return the priority unchanged.  */
  return priority;
}
10659
10660/* Return how many instructions the machine can issue per cycle.  */
10661
10662static int
10663rs6000_issue_rate ()
10664{
10665  switch (rs6000_cpu_attr) {
10666  case CPU_RIOS1:  /* ? */
10667  case CPU_RS64A:
10668  case CPU_PPC601: /* ? */
10669  case CPU_PPC7450:
10670    return 3;
10671  case CPU_PPC603:
10672  case CPU_PPC750:
10673  case CPU_PPC7400:
10674    return 2;
10675  case CPU_RIOS2:
10676  case CPU_PPC604:
10677  case CPU_PPC604E:
10678  case CPU_PPC620:
10679  case CPU_PPC630:
10680    return 4;
10681  default:
10682    return 1;
10683  }
10684}
10685
10686
10687/* Length in units of the trampoline for entering a nested function.  */
10688
10689int
10690rs6000_trampoline_size ()
10691{
10692  int ret = 0;
10693
10694  switch (DEFAULT_ABI)
10695    {
10696    default:
10697      abort ();
10698
10699    case ABI_AIX:
10700      ret = (TARGET_32BIT) ? 12 : 24;
10701      break;
10702
10703    case ABI_DARWIN:
10704    case ABI_V4:
10705    case ABI_AIX_NODESC:
10706      ret = (TARGET_32BIT) ? 40 : 48;
10707      break;
10708    }
10709
10710  return ret;
10711}
10712
10713/* Emit RTL insns to initialize the variable parts of a trampoline.
10714   FNADDR is an RTX for the address of the function's pure code.
10715   CXT is an RTX for the static chain value for the function.  */
10716
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* register/word size in bytes */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the code address and TOC pointer from the function's
	   descriptor, then store the static chain as the third word
	   of the new descriptor at ADDR.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10765
10766
10767/* Table of valid machine attributes.  */
10768
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" takes no arguments and must apply to a function type;
     validated by rs6000_handle_longcall_attribute below.  */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,       0, 0, false, false, false, NULL }  /* terminator */
};
10775
10776/* Handle a "longcall" attribute; arguments as in struct
10777   attribute_spec.handler.  */
10778
10779static tree
10780rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10781     tree *node;
10782     tree name;
10783     tree args ATTRIBUTE_UNUSED;
10784     int flags ATTRIBUTE_UNUSED;
10785     bool *no_add_attrs;
10786{
10787  if (TREE_CODE (*node) != FUNCTION_TYPE
10788      && TREE_CODE (*node) != FIELD_DECL
10789      && TREE_CODE (*node) != TYPE_DECL)
10790    {
10791      warning ("`%s' attribute only applies to functions",
10792	       IDENTIFIER_POINTER (name));
10793      *no_add_attrs = true;
10794    }
10795
10796  return NULL_TREE;
10797}
10798
10799/* Return a reference suitable for calling a function with the
10800   longcall attribute.  */
10801
10802struct rtx_def *
10803rs6000_longcall_ref (call_ref)
10804     rtx call_ref;
10805{
10806  const char *call_name;
10807  tree node;
10808
10809  if (GET_CODE (call_ref) != SYMBOL_REF)
10810    return call_ref;
10811
10812  /* System V adds '.' to the internal name, so skip them.  */
10813  call_name = XSTR (call_ref, 0);
10814  if (*call_name == '.')
10815    {
10816      while (*call_name == '.')
10817	call_name++;
10818
10819      node = get_identifier (call_name);
10820      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10821    }
10822
10823  return force_reg (Pmode, call_ref);
10824}
10825
10826
10827/* A C statement or statements to switch to the appropriate section
10828   for output of RTX in mode MODE.  You can assume that RTX is some
10829   kind of constant in RTL.  The argument MODE is redundant except in
10830   the case of a `const_int' rtx.  Select the section by calling
10831   `text_section' or one of the alternatives for other sections.
10832
10833   Do not define this macro if you put all constants in the read-only
10834   data section.  */
10835
10836#ifdef USING_ELFOS_H
10837
10838void
10839rs6000_select_rtx_section (mode, x)
10840     enum machine_mode mode;
10841     rtx x;
10842{
10843  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10844    toc_section ();
10845  else if (flag_pic
10846	   && (GET_CODE (x) == SYMBOL_REF
10847	       || GET_CODE (x) == LABEL_REF
10848	       || GET_CODE (x) == CONST))
10849    data_section ();
10850  else
10851    const_section ();
10852}
10853
10854/* A C statement or statements to switch to the appropriate
10855   section for output of DECL.  DECL is either a `VAR_DECL' node
10856   or a constant of some sort.  RELOC indicates whether forming
10857   the initial value of DECL requires link-time relocations.  */
10858
10859void
10860rs6000_select_section (decl, reloc)
10861     tree decl;
10862     int reloc;
10863{
10864  int size = int_size_in_bytes (TREE_TYPE (decl));
10865  int needs_sdata;
10866  int readonly;
10867  static void (* const sec_funcs[4]) PARAMS ((void)) = {
10868    &const_section,
10869    &sdata2_section,
10870    &data_section,
10871    &sdata_section
10872  };
10873
10874  needs_sdata = (size > 0
10875		 && size <= g_switch_value
10876		 && rs6000_sdata != SDATA_NONE
10877		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10878
10879  if (TREE_CODE (decl) == STRING_CST)
10880    readonly = ! flag_writable_strings;
10881  else if (TREE_CODE (decl) == VAR_DECL)
10882    readonly = (! (flag_pic && reloc)
10883		&& TREE_READONLY (decl)
10884		&& ! TREE_SIDE_EFFECTS (decl)
10885		&& DECL_INITIAL (decl)
10886		&& DECL_INITIAL (decl) != error_mark_node
10887		&& TREE_CONSTANT (DECL_INITIAL (decl)));
10888  else if (TREE_CODE (decl) == CONSTRUCTOR)
10889    readonly = (! (flag_pic && reloc)
10890		&& ! TREE_SIDE_EFFECTS (decl)
10891		&& TREE_CONSTANT (decl));
10892  else
10893    readonly = 1;
10894  if (needs_sdata && rs6000_sdata != SDATA_EABI)
10895    readonly = 0;
10896
10897  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10898}
10899
10900/* A C statement to build up a unique section name, expressed as a
10901   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10902   RELOC indicates whether the initial value of EXP requires
10903   link-time relocations.  If you do not define this macro, GCC will use
10904   the symbol name prefixed by `.' as the section name.  Note - this
10905   macro can now be called for uninitialized data items as well as
10906   initialised data and functions.  */
10907
10908void
10909rs6000_unique_section (decl, reloc)
10910     tree decl;
10911     int reloc;
10912{
10913  int len;
10914  int sec;
10915  const char *name;
10916  char *string;
10917  const char *prefix;
10918
10919  static const char *const prefixes[7][2] =
10920  {
10921    { ".rodata.", ".gnu.linkonce.r." },
10922    { ".sdata2.", ".gnu.linkonce.s2." },
10923    { ".data.",   ".gnu.linkonce.d." },
10924    { ".sdata.",  ".gnu.linkonce.s." },
10925    { ".bss.",    ".gnu.linkonce.b." },
10926    { ".sbss.",   ".gnu.linkonce.sb." },
10927    { ".text.",   ".gnu.linkonce.t." }
10928  };
10929
10930  if (TREE_CODE (decl) == FUNCTION_DECL)
10931    sec = 6;
10932  else
10933    {
10934      int readonly;
10935      int needs_sdata;
10936      int size;
10937
10938      readonly = 1;
10939      if (TREE_CODE (decl) == STRING_CST)
10940	readonly = ! flag_writable_strings;
10941      else if (TREE_CODE (decl) == VAR_DECL)
10942	readonly = (! (flag_pic && reloc)
10943		    && TREE_READONLY (decl)
10944		    && ! TREE_SIDE_EFFECTS (decl)
10945		    && TREE_CONSTANT (DECL_INITIAL (decl)));
10946
10947      size = int_size_in_bytes (TREE_TYPE (decl));
10948      needs_sdata = (size > 0
10949		     && size <= g_switch_value
10950		     && rs6000_sdata != SDATA_NONE
10951		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10952
10953      if (DECL_INITIAL (decl) == 0
10954	  || DECL_INITIAL (decl) == error_mark_node)
10955	sec = 4;
10956      else if (! readonly)
10957	sec = 2;
10958      else
10959	sec = 0;
10960
10961      if (needs_sdata)
10962	{
10963	  /* .sdata2 is only for EABI.  */
10964	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
10965	    sec = 2;
10966	  sec += 1;
10967	}
10968    }
10969
10970  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10971  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10972  len    = strlen (name) + strlen (prefix);
10973  string = alloca (len + 1);
10974
10975  sprintf (string, "%s%s", prefix, name);
10976
10977  DECL_SECTION_NAME (decl) = build_string (len, string);
10978}
10979
10980
10981/* If we are referencing a function that is static or is known to be
10982   in this file, make the SYMBOL_REF special.  We can use this to indicate
10983   that we can branch to this function without emitting a no-op after the
10984   call.  For real AIX calling sequences, we also replace the
10985   function name with the real name (1 or 2 leading .'s), rather than
10986   the function descriptor name.  This saves a lot of overriding code
10987   to read the prefixes.  */
10988
void
rs6000_encode_section_info (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Mark functions known to be defined in this file, so callers
	 can branch to them without a no-op after the call (see the
	 comment above this function).  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
          && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* Prefix the symbol with '.' to name the code entry rather
	     than the function descriptor.  NOTE(review): len1 is
	     always 1 here since we are inside the DEFAULT_ABI ==
	     ABI_AIX test, so the second '.' stored below is always
	     overwritten by the memcpy; harmless, as the buffer is
	     large enough either way.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables small enough for the small-data area, or placed
	 explicitly in one of the small-data sections, get an '@'
	 prefix on their assembler name.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
11057
11058#endif /* USING_ELFOS_H */
11059
11060
11061/* Return a REG that occurs in ADDR with coefficient 1.
11062   ADDR can be effectively incremented by incrementing REG.
11063
11064   r0 is special and we must not select it as an address
11065   register by this routine since our caller will try to
11066   increment the returned register via an "la" instruction.  */
11067
11068struct rtx_def *
11069find_addr_reg (addr)
11070     rtx addr;
11071{
11072  while (GET_CODE (addr) == PLUS)
11073    {
11074      if (GET_CODE (XEXP (addr, 0)) == REG
11075	  && REGNO (XEXP (addr, 0)) != 0)
11076	addr = XEXP (addr, 0);
11077      else if (GET_CODE (XEXP (addr, 1)) == REG
11078	       && REGNO (XEXP (addr, 1)) != 0)
11079	addr = XEXP (addr, 1);
11080      else if (CONSTANT_P (XEXP (addr, 0)))
11081	addr = XEXP (addr, 1);
11082      else if (CONSTANT_P (XEXP (addr, 1)))
11083	addr = XEXP (addr, 0);
11084      else
11085	abort ();
11086    }
11087  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11088    return addr;
11089  abort ();
11090}
11091
/* Report a fatal "bad address" error for insn operand OP via
   fatal_insn.  */

void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
11098
11099/* Called to register all of our global variables with the garbage
11100   collector.  */
11101
static void
rs6000_add_gc_roots ()
{
  /* Keep the pending comparison operands alive across collections.  */
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* The TOC hash table is marked through its own marking routine.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
11116
11117#if TARGET_MACHO
11118
11119#if 0
11120/* Returns 1 if OP is either a symbol reference or a sum of a symbol
11121   reference and a constant.  */
11122
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): the parenthesization below is misleading --
	 && binds tighter than ||, so this accepts either a bare
	 SYMBOL_REF or a (SYMBOL_REF|LABEL_REF)+CONST_INT sum.  The
	 whole function is inside "#if 0" and never compiled.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
11142#endif
11143
11144#ifdef RS6000_LONG_BRANCH
11145
/* Linked list of pending compiler-generated long-branch stubs; built
   by add_compiler_stub and drained by output_compiler_stub.  */
static tree stub_list = 0;
11147
11148/* ADD_COMPILER_STUB adds the compiler generated stub for handling
11149   procedure calls to the linked list.  */
11150
11151void
11152add_compiler_stub (label_name, function_name, line_number)
11153     tree label_name;
11154     tree function_name;
11155     int line_number;
11156{
11157  tree stub = build_tree_list (function_name, label_name);
11158  TREE_TYPE (stub) = build_int_2 (line_number, 0);
11159  TREE_CHAIN (stub) = stub_list;
11160  stub_list = stub;
11161}
11162
/* Accessors for a stub record built by add_compiler_stub: TREE_VALUE
   holds the stub's label, TREE_PURPOSE the function name, and the low
   bits of the TREE_TYPE constant the source line number.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
11166
11167/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11168   handling procedure calls from the linked list and initializes the
11169   linked list.  */
11170
11171void
11172output_compiler_stub ()
11173{
11174  char tmp_buf[256];
11175  char label_buf[256];
11176  char *label;
11177  tree tmp_stub, stub;
11178
11179  if (!flag_pic)
11180    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11181      {
11182	fprintf (asm_out_file,
11183		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11184
11185#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11186	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11187	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11188#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11189
11190	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11191	  strcpy (label_buf,
11192		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11193	else
11194	  {
11195	    label_buf[0] = '_';
11196	    strcpy (label_buf+1,
11197		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11198	  }
11199
11200	strcpy (tmp_buf, "lis r12,hi16(");
11201	strcat (tmp_buf, label_buf);
11202	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11203	strcat (tmp_buf, label_buf);
11204	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11205	output_asm_insn (tmp_buf, 0);
11206
11207#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11208	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11209	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11210#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11211      }
11212
11213  stub_list = 0;
11214}
11215
11216/* NO_PREVIOUS_DEF checks in the link list whether the function name is
11217   already there or not.  */
11218
11219int
11220no_previous_def (function_name)
11221     tree function_name;
11222{
11223  tree stub;
11224  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11225    if (function_name == STUB_FUNCTION_NAME (stub))
11226      return 0;
11227  return 1;
11228}
11229
11230/* GET_PREV_LABEL gets the label name from the previous definition of
11231   the function.  */
11232
11233tree
11234get_prev_label (function_name)
11235     tree function_name;
11236{
11237  tree stub;
11238  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11239    if (function_name == STUB_FUNCTION_NAME (stub))
11240      return STUB_LABEL_NAME (stub);
11241  return 0;
11242}
11243
11244/* INSN is either a function call or a millicode call.  It may have an
11245   unconditional jump in its delay slot.
11246
11247   CALL_DEST is the routine we are calling.  */
11248
11249char *
11250output_call (insn, call_dest, operand_number)
11251     rtx insn;
11252     rtx call_dest;
11253     int operand_number;
11254{
11255  static char buf[256];
11256  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11257    {
11258      tree labelname;
11259      tree funname = get_identifier (XSTR (call_dest, 0));
11260
11261      if (no_previous_def (funname))
11262	{
11263	  int line_number;
11264	  rtx label_rtx = gen_label_rtx ();
11265	  char *label_buf, temp_buf[256];
11266	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11267				       CODE_LABEL_NUMBER (label_rtx));
11268	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11269	  labelname = get_identifier (label_buf);
11270	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11271	  if (insn)
11272	    line_number = NOTE_LINE_NUMBER (insn);
11273	  add_compiler_stub (labelname, funname, line_number);
11274	}
11275      else
11276	labelname = get_prev_label (funname);
11277
11278      sprintf (buf, "jbsr %%z%d,%.246s",
11279	       operand_number, IDENTIFIER_POINTER (labelname));
11280      return buf;
11281    }
11282  else
11283    {
11284      sprintf (buf, "bl %%z%d", operand_number);
11285      return buf;
11286    }
11287}
11288
11289#endif /* RS6000_LONG_BRANCH */
11290
/* Write into BUF a local label of the form "L<N>$<SYMBOL>", carrying
   over a leading quote when SYMBOL is already quoted, or adding
   surrounding quotes when the name needs them.  LENGTH is accepted
   but unused -- presumably kept for symmetry with the other
   GEN_*_FOR_SYMBOL macros; the caller must size BUF itself
   (symbol length plus slack).  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
11308
11309
/* Generate PIC and indirect symbol stubs.  Emit to FILE a Darwin
   symbol stub named STUB for symbol SYMB, followed by its lazy
   pointer.  With flag_pic == 2 the stub computes the lazy pointer's
   address PC-relatively and jumps through it via CTR; the non-PIC
   flavor is not implemented and just emits a marker line into the
   assembly output.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* Counter making each stub's local labels unique.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* Each generated name is SYMB plus a short fixed decoration; 32
     bytes of slack covers the prefix/suffix and quoting.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* bcl records the address of local_label_0 in LR; copy it to
	 r11, then relocate to the lazy pointer with the ha16/lo16
	 halves of the label difference.  r0 preserves the caller's
	 LR across the bcl.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      /* Leave r11 pointing at the lazy pointer itself for the
	 binding helper.  */
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    /* NOTE(review): this text goes straight into the .s file and
       would not assemble -- presumably an intentionally unsupported
       configuration; confirm.  */
    fprintf (file, "non-pure not supported\n");

  /* The lazy pointer initially holds the binding helper's address,
     so the first call through the stub resolves the symbol.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
11368
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* Nonzero if CONST_INT X fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch pseudo unless reload is in progress or done,
     when no new pseudos may be created.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already relative to the PIC base register -- nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize each side of a (const (plus ...)) separately.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload a large offset cannot go into a new
		 register, so fall back to a constant-pool load.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
11423
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Deliberately empty -- placeholder only.  */
}
11433
11434#endif /* TARGET_MACHO */
11435
11436#if TARGET_ELF
11437static unsigned int
11438rs6000_elf_section_type_flags (decl, name, reloc)
11439     tree decl;
11440     const char *name;
11441     int reloc;
11442{
11443  unsigned int flags = default_section_type_flags (decl, name, reloc);
11444
11445  if (TARGET_RELOCATABLE)
11446    flags |= SECTION_WRITE;
11447
11448  return flags;
11449}
11450
11451/* Record an element in the table of global constructors.  SYMBOL is
11452   a SYMBOL_REF of the function to be called; PRIORITY is a number
11453   between 0 and MAX_INIT_PRIORITY.
11454
11455   This differs from default_named_section_asm_out_constructor in
11456   that we have special handling for -mrelocatable.  */
11457
11458static void
11459rs6000_elf_asm_out_constructor (symbol, priority)
11460     rtx symbol;
11461     int priority;
11462{
11463  const char *section = ".ctors";
11464  char buf[16];
11465
11466  if (priority != DEFAULT_INIT_PRIORITY)
11467    {
11468      sprintf (buf, ".ctors.%.5u",
11469               /* Invert the numbering so the linker puts us in the proper
11470                  order; constructors are run from right to left, and the
11471                  linker sorts in increasing order.  */
11472               MAX_INIT_PRIORITY - priority);
11473      section = buf;
11474    }
11475
11476  named_section_flags (section, SECTION_WRITE);
11477  assemble_align (POINTER_SIZE);
11478
11479  if (TARGET_RELOCATABLE)
11480    {
11481      fputs ("\t.long (", asm_out_file);
11482      output_addr_const (asm_out_file, symbol);
11483      fputs (")@fixup\n", asm_out_file);
11484    }
11485  else
11486    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11487}
11488
11489static void
11490rs6000_elf_asm_out_destructor (symbol, priority)
11491     rtx symbol;
11492     int priority;
11493{
11494  const char *section = ".dtors";
11495  char buf[16];
11496
11497  if (priority != DEFAULT_INIT_PRIORITY)
11498    {
11499      sprintf (buf, ".dtors.%.5u",
11500               /* Invert the numbering so the linker puts us in the proper
11501                  order; constructors are run from right to left, and the
11502                  linker sorts in increasing order.  */
11503               MAX_INIT_PRIORITY - priority);
11504      section = buf;
11505    }
11506
11507  named_section_flags (section, SECTION_WRITE);
11508  assemble_align (POINTER_SIZE);
11509
11510  if (TARGET_RELOCATABLE)
11511    {
11512      fputs ("\t.long (", asm_out_file);
11513      output_addr_const (asm_out_file, symbol);
11514      fputs (")@fixup\n", asm_out_file);
11515    }
11516  else
11517    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11518}
11519#endif
11520
11521#ifdef OBJECT_FORMAT_COFF
11522static void
11523xcoff_asm_named_section (name, flags)
11524     const char *name;
11525     unsigned int flags ATTRIBUTE_UNUSED;
11526{
11527  fprintf (asm_out_file, "\t.csect %s\n", name);
11528}
11529#endif
11530